From 07a10e537a5ef56f5002b5852cabc609ff2bfb78 Mon Sep 17 00:00:00 2001
From: Fábio Barboza Coelho de Souza
Date: Tue, 23 Jul 2019 01:32:56 -0300
Subject: [PATCH] nodemon, sucrase, docker

---
 node_modules/.bin/atob | 1 + node_modules/.bin/detect-libc | 1 + node_modules/.bin/is-ci | 1 + node_modules/.bin/mime | 1 + node_modules/.bin/mkdirp | 1 + node_modules/.bin/needle | 1 + node_modules/.bin/node-pre-gyp | 1 + node_modules/.bin/nodemon | 1 + node_modules/.bin/nodetouch | 1 + node_modules/.bin/nopt | 1 + node_modules/.bin/rc | 1 + node_modules/.bin/rimraf | 1 + node_modules/.bin/semver | 1 + node_modules/.bin/sucrase | 1 + node_modules/.bin/sucrase-node | 1 + node_modules/.bin/which | 1 + node_modules/.yarn-integrity | 368 + node_modules/abbrev/LICENSE | 46 + node_modules/abbrev/README.md | 23 + node_modules/abbrev/abbrev.js | 61 + node_modules/abbrev/package.json | 21 + node_modules/accepts/HISTORY.md | 236 + node_modules/accepts/LICENSE | 23 + node_modules/accepts/README.md | 142 + node_modules/accepts/index.js | 238 + node_modules/accepts/package.json | 47 + node_modules/ansi-align/CHANGELOG.md | 36 + node_modules/ansi-align/LICENSE | 13 + node_modules/ansi-align/README.md | 79 + node_modules/ansi-align/index.js | 61 + node_modules/ansi-align/package.json | 43 + node_modules/ansi-regex/index.js | 4 + node_modules/ansi-regex/license | 21 + node_modules/ansi-regex/package.json | 64 + node_modules/ansi-regex/readme.md | 39 + node_modules/ansi-styles/index.js | 165 + node_modules/ansi-styles/license | 9 + node_modules/ansi-styles/package.json | 56 + node_modules/ansi-styles/readme.md | 147 + node_modules/any-promise/.jshintrc | 4 + node_modules/any-promise/.npmignore | 7 + node_modules/any-promise/LICENSE | 19 + node_modules/any-promise/README.md | 161 + node_modules/any-promise/implementation.d.ts | 3 + node_modules/any-promise/implementation.js | 1 + node_modules/any-promise/index.d.ts | 73 + node_modules/any-promise/index.js | 1 + node_modules/any-promise/loader.js | 78 + node_modules/any-promise/optional.js | 6 + node_modules/any-promise/package.json | 45 + node_modules/any-promise/register-shim.js | 18 + node_modules/any-promise/register.d.ts | 17 + node_modules/any-promise/register.js | 94 + .../any-promise/register/bluebird.d.ts | 1 + node_modules/any-promise/register/bluebird.js | 2 + .../any-promise/register/es6-promise.d.ts | 1 + .../any-promise/register/es6-promise.js | 2 + node_modules/any-promise/register/lie.d.ts | 1 + node_modules/any-promise/register/lie.js | 2 + .../register/native-promise-only.d.ts | 1 + .../register/native-promise-only.js | 2 + node_modules/any-promise/register/pinkie.d.ts | 1 + node_modules/any-promise/register/pinkie.js | 2 + .../any-promise/register/promise.d.ts | 1 + node_modules/any-promise/register/promise.js | 2 + node_modules/any-promise/register/q.d.ts | 1 + node_modules/any-promise/register/q.js | 2 + node_modules/any-promise/register/rsvp.d.ts | 1 + node_modules/any-promise/register/rsvp.js | 2 + node_modules/any-promise/register/vow.d.ts | 1 + node_modules/any-promise/register/vow.js | 2 + node_modules/any-promise/register/when.d.ts | 1 + node_modules/any-promise/register/when.js | 2 + node_modules/anymatch/LICENSE | 15 + node_modules/anymatch/README.md | 99 + node_modules/anymatch/index.js | 67 + .../node_modules/normalize-path/LICENSE | 21 + .../node_modules/normalize-path/README.md | 92 + .../node_modules/normalize-path/index.js | 19 + .../node_modules/normalize-path/package.json | 78 + 
node_modules/anymatch/package.json | 47 + node_modules/aproba/LICENSE | 14 + node_modules/aproba/README.md | 94 + node_modules/aproba/index.js | 105 + node_modules/aproba/package.json | 34 + node_modules/are-we-there-yet/CHANGES.md | 37 + node_modules/are-we-there-yet/LICENSE | 5 + node_modules/are-we-there-yet/README.md | 195 + node_modules/are-we-there-yet/index.js | 4 + node_modules/are-we-there-yet/package.json | 35 + node_modules/are-we-there-yet/tracker-base.js | 11 + .../are-we-there-yet/tracker-group.js | 107 + .../are-we-there-yet/tracker-stream.js | 36 + node_modules/are-we-there-yet/tracker.js | 30 + node_modules/arr-diff/LICENSE | 21 + node_modules/arr-diff/README.md | 130 + node_modules/arr-diff/index.js | 47 + node_modules/arr-diff/package.json | 69 + node_modules/arr-flatten/LICENSE | 21 + node_modules/arr-flatten/README.md | 86 + node_modules/arr-flatten/index.js | 22 + node_modules/arr-flatten/package.json | 76 + node_modules/arr-union/LICENSE | 21 + node_modules/arr-union/README.md | 99 + node_modules/arr-union/index.js | 29 + node_modules/arr-union/package.json | 76 + node_modules/array-flatten/LICENSE | 21 + node_modules/array-flatten/README.md | 43 + node_modules/array-flatten/array-flatten.js | 64 + node_modules/array-flatten/package.json | 39 + node_modules/array-unique/LICENSE | 21 + node_modules/array-unique/README.md | 77 + node_modules/array-unique/index.js | 43 + node_modules/array-unique/package.json | 62 + node_modules/assign-symbols/LICENSE | 21 + node_modules/assign-symbols/README.md | 73 + node_modules/assign-symbols/index.js | 40 + node_modules/assign-symbols/package.json | 40 + node_modules/async-each/README.md | 52 + node_modules/async-each/index.js | 38 + node_modules/async-each/package.json | 20 + node_modules/atob/LICENSE | 230 + node_modules/atob/LICENSE.DOCS | 319 + node_modules/atob/README.md | 49 + node_modules/atob/bin/atob.js | 6 + node_modules/atob/bower.json | 24 + node_modules/atob/browser-atob.js | 44 + node_modules/atob/node-atob.js | 7 + node_modules/atob/package.json | 24 + node_modules/atob/test.js | 18 + node_modules/balanced-match/.npmignore | 5 + node_modules/balanced-match/LICENSE.md | 21 + node_modules/balanced-match/README.md | 91 + node_modules/balanced-match/index.js | 59 + node_modules/balanced-match/package.json | 49 + node_modules/base/LICENSE | 21 + node_modules/base/README.md | 491 ++ node_modules/base/index.js | 435 + .../base/node_modules/define-property/LICENSE | 21 + .../node_modules/define-property/README.md | 95 + .../node_modules/define-property/index.js | 31 + .../node_modules/define-property/package.json | 62 + node_modules/base/package.json | 111 + .../binary-extensions/binary-extensions.json | 252 + node_modules/binary-extensions/license | 9 + node_modules/binary-extensions/package.json | 36 + node_modules/binary-extensions/readme.md | 33 + node_modules/body-parser/HISTORY.md | 609 ++ node_modules/body-parser/LICENSE | 23 + node_modules/body-parser/README.md | 443 + node_modules/body-parser/index.js | 157 + node_modules/body-parser/lib/read.js | 181 + node_modules/body-parser/lib/types/json.js | 230 + node_modules/body-parser/lib/types/raw.js | 101 + node_modules/body-parser/lib/types/text.js | 121 + .../body-parser/lib/types/urlencoded.js | 284 + node_modules/body-parser/package.json | 52 + node_modules/boxen/index.js | 138 + node_modules/boxen/license | 9 + node_modules/boxen/package.json | 47 + node_modules/boxen/readme.md | 175 + node_modules/brace-expansion/LICENSE | 21 + 
node_modules/brace-expansion/README.md | 129 + node_modules/brace-expansion/index.js | 201 + node_modules/brace-expansion/package.json | 47 + node_modules/braces/LICENSE | 21 + node_modules/braces/README.md | 640 ++ node_modules/braces/index.js | 318 + node_modules/braces/lib/braces.js | 104 + node_modules/braces/lib/compilers.js | 282 + node_modules/braces/lib/parsers.js | 360 + node_modules/braces/lib/utils.js | 343 + node_modules/braces/package.json | 108 + node_modules/bytes/History.md | 87 + node_modules/bytes/LICENSE | 23 + node_modules/bytes/Readme.md | 126 + node_modules/bytes/index.js | 162 + node_modules/bytes/package.json | 41 + node_modules/cache-base/LICENSE | 21 + node_modules/cache-base/README.md | 291 + node_modules/cache-base/index.js | 261 + node_modules/cache-base/package.json | 82 + node_modules/camelcase/index.js | 64 + node_modules/camelcase/license | 21 + node_modules/camelcase/package.json | 42 + node_modules/camelcase/readme.md | 57 + node_modules/capture-stack-trace/index.js | 18 + node_modules/capture-stack-trace/license | 21 + node_modules/capture-stack-trace/package.json | 29 + node_modules/capture-stack-trace/readme.md | 36 + node_modules/chalk/index.js | 228 + node_modules/chalk/index.js.flow | 93 + node_modules/chalk/license | 9 + node_modules/chalk/package.json | 71 + node_modules/chalk/readme.md | 314 + node_modules/chalk/templates.js | 128 + node_modules/chalk/types/index.d.ts | 97 + node_modules/chokidar/CHANGELOG.md | 317 + node_modules/chokidar/README.md | 294 + node_modules/chokidar/index.js | 747 ++ node_modules/chokidar/lib/fsevents-handler.js | 408 + node_modules/chokidar/lib/nodefs-handler.js | 506 ++ node_modules/chokidar/package.json | 63 + node_modules/chokidar/types/index.d.ts | 191 + node_modules/chownr/LICENSE | 15 + node_modules/chownr/README.md | 3 + node_modules/chownr/chownr.js | 125 + node_modules/chownr/package.json | 26 + node_modules/ci-info/CHANGELOG.md | 62 + node_modules/ci-info/LICENSE | 21 + node_modules/ci-info/README.md | 107 + node_modules/ci-info/index.js | 66 + node_modules/ci-info/package.json | 36 + node_modules/ci-info/vendors.json | 152 + node_modules/class-utils/LICENSE | 21 + node_modules/class-utils/README.md | 300 + node_modules/class-utils/index.js | 370 + node_modules/class-utils/package.json | 90 + node_modules/cli-boxes/boxes.json | 50 + node_modules/cli-boxes/index.js | 2 + node_modules/cli-boxes/license | 21 + node_modules/cli-boxes/package.json | 39 + node_modules/cli-boxes/readme.md | 94 + node_modules/code-point-at/index.js | 32 + node_modules/code-point-at/license | 21 + node_modules/code-point-at/package.json | 38 + node_modules/code-point-at/readme.md | 32 + node_modules/collection-visit/LICENSE | 21 + node_modules/collection-visit/README.md | 89 + node_modules/collection-visit/index.js | 30 + node_modules/collection-visit/package.json | 76 + node_modules/color-convert/CHANGELOG.md | 54 + node_modules/color-convert/LICENSE | 21 + node_modules/color-convert/README.md | 68 + node_modules/color-convert/conversions.js | 868 ++ node_modules/color-convert/index.js | 78 + node_modules/color-convert/package.json | 46 + node_modules/color-convert/route.js | 97 + node_modules/color-name/.eslintrc.json | 43 + node_modules/color-name/.npmignore | 107 + node_modules/color-name/LICENSE | 8 + node_modules/color-name/README.md | 11 + node_modules/color-name/index.js | 152 + node_modules/color-name/package.json | 25 + node_modules/color-name/test.js | 7 + node_modules/commander/CHANGELOG.md | 408 + 
node_modules/commander/LICENSE | 22 + node_modules/commander/Readme.md | 428 + node_modules/commander/index.js | 1224 +++ node_modules/commander/package.json | 38 + node_modules/commander/typings/index.d.ts | 310 + node_modules/component-emitter/History.md | 75 + node_modules/component-emitter/LICENSE | 24 + node_modules/component-emitter/Readme.md | 74 + node_modules/component-emitter/index.js | 175 + node_modules/component-emitter/package.json | 27 + node_modules/concat-map/.travis.yml | 4 + node_modules/concat-map/LICENSE | 18 + node_modules/concat-map/README.markdown | 62 + node_modules/concat-map/example/map.js | 6 + node_modules/concat-map/index.js | 13 + node_modules/concat-map/package.json | 43 + node_modules/concat-map/test/map.js | 39 + node_modules/configstore/index.js | 106 + node_modules/configstore/license | 9 + node_modules/configstore/package.json | 47 + node_modules/configstore/readme.md | 116 + node_modules/console-control-strings/LICENSE | 13 + .../console-control-strings/README.md | 145 + .../console-control-strings/README.md~ | 140 + node_modules/console-control-strings/index.js | 125 + .../console-control-strings/package.json | 27 + node_modules/content-disposition/HISTORY.md | 55 + node_modules/content-disposition/LICENSE | 22 + node_modules/content-disposition/README.md | 148 + node_modules/content-disposition/index.js | 458 + node_modules/content-disposition/package.json | 44 + node_modules/content-type/HISTORY.md | 24 + node_modules/content-type/LICENSE | 22 + node_modules/content-type/README.md | 92 + node_modules/content-type/index.js | 222 + node_modules/content-type/package.json | 40 + node_modules/cookie-signature/.npmignore | 4 + node_modules/cookie-signature/History.md | 38 + node_modules/cookie-signature/Readme.md | 42 + node_modules/cookie-signature/index.js | 51 + node_modules/cookie-signature/package.json | 18 + node_modules/cookie/HISTORY.md | 123 + node_modules/cookie/LICENSE | 24 + node_modules/cookie/README.md | 253 + node_modules/cookie/index.js | 198 + node_modules/cookie/package.json | 40 + node_modules/copy-descriptor/LICENSE | 21 + node_modules/copy-descriptor/index.js | 81 + node_modules/copy-descriptor/package.json | 56 + node_modules/core-util-is/LICENSE | 19 + node_modules/core-util-is/README.md | 3 + node_modules/core-util-is/float.patch | 604 ++ node_modules/core-util-is/lib/util.js | 107 + node_modules/core-util-is/package.json | 32 + node_modules/core-util-is/test.js | 68 + node_modules/create-error-class/index.js | 44 + node_modules/create-error-class/license | 21 + node_modules/create-error-class/package.json | 30 + node_modules/create-error-class/readme.md | 54 + node_modules/cross-spawn/CHANGELOG.md | 6 + node_modules/cross-spawn/LICENSE | 19 + node_modules/cross-spawn/README.md | 85 + node_modules/cross-spawn/index.js | 59 + node_modules/cross-spawn/lib/enoent.js | 73 + node_modules/cross-spawn/lib/parse.js | 113 + .../cross-spawn/lib/util/escapeArgument.js | 30 + .../cross-spawn/lib/util/escapeCommand.js | 12 + .../lib/util/hasEmptyArgumentBug.js | 18 + .../cross-spawn/lib/util/readShebang.js | 37 + .../cross-spawn/lib/util/resolveCommand.js | 31 + .../cross-spawn/node_modules/.bin/which | 1 + node_modules/cross-spawn/package.json | 53 + node_modules/crypto-random-string/index.js | 10 + node_modules/crypto-random-string/license | 21 + .../crypto-random-string/package.json | 43 + node_modules/crypto-random-string/readme.md | 49 + node_modules/debug/.coveralls.yml | 1 + node_modules/debug/.eslintrc | 11 + 
node_modules/debug/.npmignore | 9 + node_modules/debug/.travis.yml | 14 + node_modules/debug/CHANGELOG.md | 362 + node_modules/debug/LICENSE | 19 + node_modules/debug/Makefile | 50 + node_modules/debug/README.md | 312 + node_modules/debug/component.json | 19 + node_modules/debug/karma.conf.js | 70 + node_modules/debug/node.js | 1 + node_modules/debug/package.json | 49 + node_modules/debug/src/browser.js | 185 + node_modules/debug/src/debug.js | 202 + node_modules/debug/src/index.js | 10 + node_modules/debug/src/inspector-log.js | 15 + node_modules/debug/src/node.js | 248 + node_modules/decode-uri-component/index.js | 94 + node_modules/decode-uri-component/license | 21 + .../decode-uri-component/package.json | 37 + node_modules/decode-uri-component/readme.md | 70 + node_modules/deep-extend/CHANGELOG.md | 46 + node_modules/deep-extend/LICENSE | 20 + node_modules/deep-extend/README.md | 91 + node_modules/deep-extend/index.js | 1 + node_modules/deep-extend/lib/deep-extend.js | 150 + node_modules/deep-extend/package.json | 62 + node_modules/define-property/LICENSE | 21 + node_modules/define-property/README.md | 77 + node_modules/define-property/index.js | 31 + .../is-accessor-descriptor/LICENSE | 21 + .../is-accessor-descriptor/README.md | 123 + .../is-accessor-descriptor/index.js | 69 + .../node_modules/kind-of/LICENSE | 21 + .../node_modules/kind-of/README.md | 261 + .../node_modules/kind-of/index.js | 116 + .../node_modules/kind-of/package.json | 90 + .../is-accessor-descriptor/package.json | 61 + .../node_modules/is-data-descriptor/LICENSE | 21 + .../node_modules/is-data-descriptor/README.md | 128 + .../node_modules/is-data-descriptor/index.js | 55 + .../node_modules/kind-of/LICENSE | 21 + .../node_modules/kind-of/README.md | 261 + .../node_modules/kind-of/index.js | 116 + .../node_modules/kind-of/package.json | 90 + .../is-data-descriptor/package.json | 60 + .../node_modules/is-descriptor/LICENSE | 21 + .../node_modules/is-descriptor/README.md | 193 + .../node_modules/is-descriptor/index.js | 22 + .../node_modules/is-descriptor/package.json | 75 + .../node_modules/kind-of/LICENSE | 21 + .../node_modules/kind-of/README.md | 342 + .../node_modules/kind-of/index.js | 147 + .../node_modules/kind-of/package.json | 91 + node_modules/define-property/package.json | 51 + node_modules/delegates/.npmignore | 1 + node_modules/delegates/History.md | 22 + node_modules/delegates/License | 20 + node_modules/delegates/Makefile | 8 + node_modules/delegates/Readme.md | 94 + node_modules/delegates/index.js | 121 + node_modules/delegates/package.json | 13 + node_modules/delegates/test/index.js | 94 + node_modules/depd/History.md | 96 + node_modules/depd/LICENSE | 22 + node_modules/depd/Readme.md | 280 + node_modules/depd/index.js | 522 ++ node_modules/depd/lib/browser/index.js | 77 + .../depd/lib/compat/callsite-tostring.js | 103 + .../depd/lib/compat/event-listener-count.js | 22 + node_modules/depd/lib/compat/index.js | 79 + node_modules/depd/package.json | 41 + node_modules/destroy/LICENSE | 22 + node_modules/destroy/README.md | 60 + node_modules/destroy/index.js | 75 + node_modules/destroy/package.json | 37 + node_modules/detect-libc/.npmignore | 7 + node_modules/detect-libc/LICENSE | 201 + node_modules/detect-libc/README.md | 78 + node_modules/detect-libc/bin/detect-libc.js | 18 + node_modules/detect-libc/lib/detect-libc.js | 92 + node_modules/detect-libc/package.json | 35 + node_modules/dot-prop/index.js | 123 + node_modules/dot-prop/license | 21 + node_modules/dot-prop/package.json | 48 + 
node_modules/dot-prop/readme.md | 103 + node_modules/duplexer3/LICENSE.md | 26 + node_modules/duplexer3/README.md | 115 + node_modules/duplexer3/index.js | 76 + node_modules/duplexer3/package.json | 28 + node_modules/ee-first/LICENSE | 22 + node_modules/ee-first/README.md | 80 + node_modules/ee-first/index.js | 95 + node_modules/ee-first/package.json | 29 + node_modules/encodeurl/HISTORY.md | 14 + node_modules/encodeurl/LICENSE | 22 + node_modules/encodeurl/README.md | 128 + node_modules/encodeurl/index.js | 60 + node_modules/encodeurl/package.json | 40 + node_modules/escape-html/LICENSE | 24 + node_modules/escape-html/Readme.md | 43 + node_modules/escape-html/index.js | 78 + node_modules/escape-html/package.json | 24 + node_modules/escape-string-regexp/index.js | 11 + node_modules/escape-string-regexp/license | 21 + .../escape-string-regexp/package.json | 41 + node_modules/escape-string-regexp/readme.md | 27 + node_modules/etag/HISTORY.md | 83 + node_modules/etag/LICENSE | 22 + node_modules/etag/README.md | 159 + node_modules/etag/index.js | 131 + node_modules/etag/package.json | 47 + node_modules/execa/index.js | 309 + node_modules/execa/lib/errname.js | 37 + node_modules/execa/lib/stdio.js | 41 + node_modules/execa/license | 9 + node_modules/execa/package.json | 76 + node_modules/execa/readme.md | 279 + node_modules/expand-brackets/LICENSE | 21 + node_modules/expand-brackets/README.md | 302 + node_modules/expand-brackets/changelog.md | 35 + node_modules/expand-brackets/index.js | 211 + node_modules/expand-brackets/lib/compilers.js | 87 + node_modules/expand-brackets/lib/parsers.js | 219 + node_modules/expand-brackets/lib/utils.js | 34 + node_modules/expand-brackets/package.json | 85 + node_modules/express/History.md | 3477 ++++++++ node_modules/express/LICENSE | 24 + node_modules/express/Readme.md | 155 + node_modules/express/index.js | 11 + node_modules/express/lib/application.js | 644 ++ node_modules/express/lib/express.js | 116 + node_modules/express/lib/middleware/init.js | 43 + node_modules/express/lib/middleware/query.js | 47 + node_modules/express/lib/request.js | 525 ++ node_modules/express/lib/response.js | 1142 +++ node_modules/express/lib/router/index.js | 662 ++ node_modules/express/lib/router/layer.js | 181 + node_modules/express/lib/router/route.js | 216 + node_modules/express/lib/utils.js | 306 + node_modules/express/lib/view.js | 182 + node_modules/express/package.json | 98 + node_modules/extend-shallow/LICENSE | 21 + node_modules/extend-shallow/README.md | 61 + node_modules/extend-shallow/index.js | 33 + node_modules/extend-shallow/package.json | 56 + node_modules/extglob/LICENSE | 21 + node_modules/extglob/README.md | 362 + node_modules/extglob/changelog.md | 25 + node_modules/extglob/index.js | 331 + node_modules/extglob/lib/compilers.js | 169 + node_modules/extglob/lib/extglob.js | 78 + node_modules/extglob/lib/parsers.js | 156 + node_modules/extglob/lib/utils.js | 69 + .../node_modules/define-property/LICENSE | 21 + .../node_modules/define-property/README.md | 95 + .../node_modules/define-property/index.js | 31 + .../node_modules/define-property/package.json | 62 + node_modules/extglob/package.json | 108 + node_modules/fill-range/LICENSE | 21 + node_modules/fill-range/README.md | 250 + node_modules/fill-range/index.js | 208 + node_modules/fill-range/package.json | 82 + node_modules/finalhandler/HISTORY.md | 187 + node_modules/finalhandler/LICENSE | 22 + node_modules/finalhandler/README.md | 148 + node_modules/finalhandler/index.js | 331 + 
node_modules/finalhandler/package.json | 45 + node_modules/for-in/LICENSE | 21 + node_modules/for-in/README.md | 85 + node_modules/for-in/index.js | 16 + node_modules/for-in/package.json | 68 + node_modules/forwarded/HISTORY.md | 16 + node_modules/forwarded/LICENSE | 22 + node_modules/forwarded/README.md | 57 + node_modules/forwarded/index.js | 76 + node_modules/forwarded/package.json | 43 + node_modules/fragment-cache/LICENSE | 21 + node_modules/fragment-cache/README.md | 156 + node_modules/fragment-cache/index.js | 128 + node_modules/fragment-cache/package.json | 60 + node_modules/fresh/HISTORY.md | 70 + node_modules/fresh/LICENSE | 23 + node_modules/fresh/README.md | 119 + node_modules/fresh/index.js | 137 + node_modules/fresh/package.json | 46 + node_modules/fs-minipass/LICENSE | 15 + node_modules/fs-minipass/README.md | 70 + node_modules/fs-minipass/index.js | 386 + node_modules/fs-minipass/package.json | 36 + node_modules/fs.realpath/LICENSE | 43 + node_modules/fs.realpath/README.md | 33 + node_modules/fs.realpath/index.js | 66 + node_modules/fs.realpath/old.js | 303 + node_modules/fs.realpath/package.json | 26 + node_modules/fsevents/.travis.yml | 101 + node_modules/fsevents/ISSUE_TEMPLATE.md | 8 + node_modules/fsevents/LICENSE | 22 + node_modules/fsevents/Readme.md | 78 + node_modules/fsevents/binding.gyp | 29 + node_modules/fsevents/fsevents.cc | 88 + node_modules/fsevents/fsevents.js | 108 + node_modules/fsevents/install.js | 7 + .../Release/node-v72-darwin-x64/fse.node | Bin 0 -> 37992 bytes .../fsevents/node_modules/.bin/node-pre-gyp | 1 + .../fsevents/node_modules/abbrev/LICENSE | 46 + .../fsevents/node_modules/abbrev/README.md | 23 + .../fsevents/node_modules/abbrev/abbrev.js | 61 + .../fsevents/node_modules/abbrev/package.json | 59 + .../fsevents/node_modules/ansi-regex/index.js | 4 + .../fsevents/node_modules/ansi-regex/license | 21 + .../node_modules/ansi-regex/package.json | 111 + .../node_modules/ansi-regex/readme.md | 39 + .../fsevents/node_modules/aproba/LICENSE | 14 + .../fsevents/node_modules/aproba/README.md | 94 + .../fsevents/node_modules/aproba/index.js | 105 + .../fsevents/node_modules/aproba/package.json | 65 + .../node_modules/are-we-there-yet/CHANGES.md | 37 + .../node_modules/are-we-there-yet/LICENSE | 5 + .../node_modules/are-we-there-yet/README.md | 195 + .../node_modules/are-we-there-yet/index.js | 4 + .../are-we-there-yet/package.json | 66 + .../are-we-there-yet/tracker-base.js | 11 + .../are-we-there-yet/tracker-group.js | 107 + .../are-we-there-yet/tracker-stream.js | 36 + .../node_modules/are-we-there-yet/tracker.js | 30 + .../node_modules/balanced-match/.npmignore | 5 + .../node_modules/balanced-match/LICENSE.md | 21 + .../node_modules/balanced-match/README.md | 91 + .../node_modules/balanced-match/index.js | 59 + .../node_modules/balanced-match/package.json | 80 + .../node_modules/brace-expansion/LICENSE | 21 + .../node_modules/brace-expansion/README.md | 129 + .../node_modules/brace-expansion/index.js | 201 + .../node_modules/brace-expansion/package.json | 78 + .../fsevents/node_modules/chownr/LICENSE | 15 + .../fsevents/node_modules/chownr/README.md | 3 + .../fsevents/node_modules/chownr/chownr.js | 88 + .../fsevents/node_modules/chownr/package.json | 62 + .../node_modules/code-point-at/index.js | 32 + .../node_modules/code-point-at/license | 21 + .../node_modules/code-point-at/package.json | 73 + .../node_modules/code-point-at/readme.md | 32 + .../node_modules/concat-map/.travis.yml | 4 + .../fsevents/node_modules/concat-map/LICENSE | 18 + 
.../node_modules/concat-map/README.markdown | 62 + .../node_modules/concat-map/example/map.js | 6 + .../fsevents/node_modules/concat-map/index.js | 13 + .../node_modules/concat-map/package.json | 91 + .../node_modules/concat-map/test/map.js | 39 + .../console-control-strings/LICENSE | 13 + .../console-control-strings/README.md | 145 + .../console-control-strings/README.md~ | 140 + .../console-control-strings/index.js | 125 + .../console-control-strings/package.json | 64 + .../node_modules/core-util-is/LICENSE | 19 + .../node_modules/core-util-is/README.md | 3 + .../node_modules/core-util-is/float.patch | 604 ++ .../node_modules/core-util-is/lib/util.js | 107 + .../node_modules/core-util-is/package.json | 65 + .../node_modules/core-util-is/test.js | 68 + .../fsevents/node_modules/debug/CHANGELOG.md | 395 + .../fsevents/node_modules/debug/LICENSE | 19 + .../fsevents/node_modules/debug/README.md | 455 + .../fsevents/node_modules/debug/dist/debug.js | 912 ++ .../fsevents/node_modules/debug/package.json | 105 + .../node_modules/debug/src/browser.js | 264 + .../fsevents/node_modules/debug/src/common.js | 266 + .../fsevents/node_modules/debug/src/index.js | 10 + .../fsevents/node_modules/debug/src/node.js | 257 + .../node_modules/deep-extend/CHANGELOG.md | 46 + .../fsevents/node_modules/deep-extend/LICENSE | 20 + .../node_modules/deep-extend/README.md | 91 + .../node_modules/deep-extend/index.js | 1 + .../deep-extend/lib/deep-extend.js | 150 + .../node_modules/deep-extend/package.json | 95 + .../node_modules/delegates/.npmignore | 1 + .../node_modules/delegates/History.md | 22 + .../fsevents/node_modules/delegates/License | 20 + .../fsevents/node_modules/delegates/Makefile | 8 + .../fsevents/node_modules/delegates/Readme.md | 94 + .../fsevents/node_modules/delegates/index.js | 121 + .../node_modules/delegates/package.json | 51 + .../node_modules/delegates/test/index.js | 94 + .../node_modules/detect-libc/.npmignore | 7 + .../fsevents/node_modules/detect-libc/LICENSE | 201 + .../node_modules/detect-libc/README.md | 78 + .../detect-libc/bin/detect-libc.js | 18 + .../detect-libc/lib/detect-libc.js | 92 + .../node_modules/detect-libc/package.json | 73 + .../fsevents/node_modules/fs-minipass/LICENSE | 15 + .../node_modules/fs-minipass/README.md | 70 + .../node_modules/fs-minipass/index.js | 386 + .../node_modules/fs-minipass/package.json | 65 + .../fsevents/node_modules/fs.realpath/LICENSE | 43 + .../node_modules/fs.realpath/README.md | 33 + .../node_modules/fs.realpath/index.js | 66 + .../fsevents/node_modules/fs.realpath/old.js | 303 + .../node_modules/fs.realpath/package.json | 62 + .../fsevents/node_modules/gauge/CHANGELOG.md | 160 + .../fsevents/node_modules/gauge/LICENSE | 13 + .../fsevents/node_modules/gauge/README.md | 399 + .../fsevents/node_modules/gauge/base-theme.js | 14 + .../fsevents/node_modules/gauge/error.js | 24 + .../fsevents/node_modules/gauge/has-color.js | 12 + .../fsevents/node_modules/gauge/index.js | 233 + .../fsevents/node_modules/gauge/package.json | 94 + .../fsevents/node_modules/gauge/plumbing.js | 48 + .../fsevents/node_modules/gauge/process.js | 3 + .../node_modules/gauge/progress-bar.js | 35 + .../node_modules/gauge/render-template.js | 181 + .../node_modules/gauge/set-immediate.js | 7 + .../node_modules/gauge/set-interval.js | 3 + .../fsevents/node_modules/gauge/spin.js | 5 + .../node_modules/gauge/template-item.js | 73 + .../fsevents/node_modules/gauge/theme-set.js | 115 + .../fsevents/node_modules/gauge/themes.js | 54 + .../node_modules/gauge/wide-truncate.js | 25 + 
.../fsevents/node_modules/glob/LICENSE | 15 + .../fsevents/node_modules/glob/README.md | 368 + .../fsevents/node_modules/glob/changelog.md | 67 + .../fsevents/node_modules/glob/common.js | 240 + .../fsevents/node_modules/glob/glob.js | 790 ++ .../fsevents/node_modules/glob/package.json | 79 + .../fsevents/node_modules/glob/sync.js | 486 + .../fsevents/node_modules/has-unicode/LICENSE | 14 + .../node_modules/has-unicode/README.md | 43 + .../node_modules/has-unicode/index.js | 16 + .../node_modules/has-unicode/package.json | 61 + .../node_modules/iconv-lite/Changelog.md | 162 + .../fsevents/node_modules/iconv-lite/LICENSE | 21 + .../node_modules/iconv-lite/README.md | 156 + .../iconv-lite/encodings/dbcs-codec.js | 555 ++ .../iconv-lite/encodings/dbcs-data.js | 176 + .../iconv-lite/encodings/index.js | 22 + .../iconv-lite/encodings/internal.js | 188 + .../iconv-lite/encodings/sbcs-codec.js | 72 + .../encodings/sbcs-data-generated.js | 451 + .../iconv-lite/encodings/sbcs-data.js | 174 + .../encodings/tables/big5-added.json | 122 + .../iconv-lite/encodings/tables/cp936.json | 264 + .../iconv-lite/encodings/tables/cp949.json | 273 + .../iconv-lite/encodings/tables/cp950.json | 177 + .../iconv-lite/encodings/tables/eucjp.json | 182 + .../encodings/tables/gb18030-ranges.json | 1 + .../encodings/tables/gbk-added.json | 55 + .../iconv-lite/encodings/tables/shiftjis.json | 125 + .../iconv-lite/encodings/utf16.js | 177 + .../node_modules/iconv-lite/encodings/utf7.js | 290 + .../iconv-lite/lib/bom-handling.js | 52 + .../iconv-lite/lib/extend-node.js | 217 + .../node_modules/iconv-lite/lib/index.d.ts | 24 + .../node_modules/iconv-lite/lib/index.js | 153 + .../node_modules/iconv-lite/lib/streams.js | 121 + .../node_modules/iconv-lite/package.json | 79 + .../fsevents/node_modules/ignore-walk/LICENSE | 15 + .../node_modules/ignore-walk/README.md | 60 + .../node_modules/ignore-walk/index.js | 265 + .../node_modules/ignore-walk/package.json | 74 + .../fsevents/node_modules/inflight/LICENSE | 15 + .../fsevents/node_modules/inflight/README.md | 37 + .../node_modules/inflight/inflight.js | 54 + .../node_modules/inflight/package.json | 61 + .../fsevents/node_modules/inherits/LICENSE | 16 + .../fsevents/node_modules/inherits/README.md | 42 + .../node_modules/inherits/inherits.js | 7 + .../node_modules/inherits/inherits_browser.js | 23 + .../node_modules/inherits/package.json | 65 + .../fsevents/node_modules/ini/LICENSE | 15 + .../fsevents/node_modules/ini/README.md | 102 + node_modules/fsevents/node_modules/ini/ini.js | 194 + .../fsevents/node_modules/ini/package.json | 66 + .../is-fullwidth-code-point/index.js | 46 + .../is-fullwidth-code-point/license | 21 + .../is-fullwidth-code-point/package.json | 80 + .../is-fullwidth-code-point/readme.md | 39 + .../fsevents/node_modules/isarray/.npmignore | 1 + .../fsevents/node_modules/isarray/.travis.yml | 4 + .../fsevents/node_modules/isarray/Makefile | 6 + .../fsevents/node_modules/isarray/README.md | 60 + .../node_modules/isarray/component.json | 19 + .../fsevents/node_modules/isarray/index.js | 5 + .../node_modules/isarray/package.json | 76 + .../fsevents/node_modules/isarray/test.js | 20 + .../fsevents/node_modules/minimatch/LICENSE | 15 + .../fsevents/node_modules/minimatch/README.md | 209 + .../node_modules/minimatch/minimatch.js | 923 ++ .../node_modules/minimatch/package.json | 67 + .../node_modules/minimist/.travis.yml | 4 + .../fsevents/node_modules/minimist/LICENSE | 18 + .../node_modules/minimist/example/parse.js | 2 + 
.../fsevents/node_modules/minimist/index.js | 187 + .../node_modules/minimist/package.json | 74 + .../node_modules/minimist/readme.markdown | 73 + .../node_modules/minimist/test/dash.js | 24 + .../minimist/test/default_bool.js | 20 + .../node_modules/minimist/test/dotted.js | 16 + .../node_modules/minimist/test/long.js | 31 + .../node_modules/minimist/test/parse.js | 318 + .../minimist/test/parse_modified.js | 9 + .../node_modules/minimist/test/short.js | 67 + .../node_modules/minimist/test/whitespace.js | 8 + .../fsevents/node_modules/minipass/LICENSE | 15 + .../fsevents/node_modules/minipass/README.md | 124 + .../fsevents/node_modules/minipass/index.js | 375 + .../node_modules/minipass/package.json | 72 + .../fsevents/node_modules/minizlib/LICENSE | 26 + .../fsevents/node_modules/minizlib/README.md | 44 + .../node_modules/minizlib/constants.js | 46 + .../fsevents/node_modules/minizlib/index.js | 335 + .../node_modules/minizlib/package.json | 74 + .../fsevents/node_modules/mkdirp/.travis.yml | 8 + .../fsevents/node_modules/mkdirp/LICENSE | 21 + .../fsevents/node_modules/mkdirp/bin/cmd.js | 33 + .../node_modules/mkdirp/bin/usage.txt | 12 + .../node_modules/mkdirp/examples/pow.js | 6 + .../fsevents/node_modules/mkdirp/index.js | 98 + .../fsevents/node_modules/mkdirp/package.json | 66 + .../node_modules/mkdirp/readme.markdown | 100 + .../node_modules/mkdirp/test/chmod.js | 41 + .../node_modules/mkdirp/test/clobber.js | 38 + .../node_modules/mkdirp/test/mkdirp.js | 28 + .../node_modules/mkdirp/test/opts_fs.js | 29 + .../node_modules/mkdirp/test/opts_fs_sync.js | 27 + .../fsevents/node_modules/mkdirp/test/perm.js | 32 + .../node_modules/mkdirp/test/perm_sync.js | 36 + .../fsevents/node_modules/mkdirp/test/race.js | 37 + .../fsevents/node_modules/mkdirp/test/rel.js | 32 + .../node_modules/mkdirp/test/return.js | 25 + .../node_modules/mkdirp/test/return_sync.js | 24 + .../fsevents/node_modules/mkdirp/test/root.js | 19 + .../fsevents/node_modules/mkdirp/test/sync.js | 32 + .../node_modules/mkdirp/test/umask.js | 28 + .../node_modules/mkdirp/test/umask_sync.js | 32 + .../fsevents/node_modules/ms/index.js | 162 + .../fsevents/node_modules/ms/license.md | 21 + .../fsevents/node_modules/ms/package.json | 72 + .../fsevents/node_modules/ms/readme.md | 60 + .../fsevents/node_modules/needle/.npmignore | 6 + .../fsevents/node_modules/needle/README.md | 593 ++ .../fsevents/node_modules/needle/bin/needle | 40 + .../needle/examples/deflated-stream.js | 22 + .../needle/examples/digest-auth.js | 16 + .../needle/examples/download-to-file.js | 18 + .../needle/examples/multipart-stream.js | 25 + .../needle/examples/parsed-stream.js | 23 + .../needle/examples/parsed-stream2.js | 21 + .../needle/examples/stream-events.js | 23 + .../needle/examples/stream-to-file.js | 14 + .../needle/examples/upload-image.js | 51 + .../fsevents/node_modules/needle/lib/auth.js | 110 + .../node_modules/needle/lib/cookies.js | 79 + .../node_modules/needle/lib/decoder.js | 53 + .../node_modules/needle/lib/multipart.js | 98 + .../node_modules/needle/lib/needle.js | 795 ++ .../node_modules/needle/lib/parsers.js | 120 + .../node_modules/needle/lib/querystring.js | 49 + .../fsevents/node_modules/needle/license.txt | 19 + .../node_modules/needle/package-lock.json | 395 + .../fsevents/node_modules/needle/package.json | 105 + .../needle/test/basic_auth_spec.js | 196 + .../needle/test/compression_spec.js | 94 + .../node_modules/needle/test/cookies_spec.js | 305 + .../node_modules/needle/test/decoder_spec.js | 86 + 
.../node_modules/needle/test/errors_spec.js | 286 + .../node_modules/needle/test/headers_spec.js | 198 + .../node_modules/needle/test/helpers.js | 72 + .../node_modules/needle/test/keys/ssl.cert | 21 + .../node_modules/needle/test/keys/ssl.key | 27 + .../needle/test/long_string_spec.js | 34 + .../node_modules/needle/test/output_spec.js | 254 + .../node_modules/needle/test/parsing_spec.js | 494 ++ .../needle/test/post_data_spec.js | 1021 +++ .../node_modules/needle/test/proxy_spec.js | 202 + .../needle/test/querystring_spec.js | 128 + .../node_modules/needle/test/redirect_spec.js | 392 + .../needle/test/redirect_with_timeout.js | 45 + .../needle/test/request_stream_spec.js | 202 + .../needle/test/response_stream_spec.js | 139 + .../needle/test/socket_pool_spec.js | 66 + .../node_modules/needle/test/url_spec.js | 155 + .../needle/test/utils/formidable.js | 17 + .../node_modules/needle/test/utils/proxy.js | 62 + .../node_modules/needle/test/utils/test.js | 104 + .../node_modules/node-pre-gyp/CHANGELOG.md | 432 + .../node_modules/node-pre-gyp/LICENSE | 27 + .../node_modules/node-pre-gyp/README.md | 693 ++ .../node_modules/node-pre-gyp/appveyor.yml | 30 + .../node-pre-gyp/bin/node-pre-gyp | 134 + .../node-pre-gyp/bin/node-pre-gyp.cmd | 2 + .../node_modules/node-pre-gyp/contributing.md | 10 + .../node_modules/node-pre-gyp/lib/build.js | 51 + .../node_modules/node-pre-gyp/lib/clean.js | 32 + .../node-pre-gyp/lib/configure.js | 52 + .../node_modules/node-pre-gyp/lib/info.js | 40 + .../node_modules/node-pre-gyp/lib/install.js | 255 + .../node-pre-gyp/lib/node-pre-gyp.js | 203 + .../node_modules/node-pre-gyp/lib/package.js | 56 + .../node-pre-gyp/lib/pre-binding.js | 30 + .../node_modules/node-pre-gyp/lib/publish.js | 79 + .../node_modules/node-pre-gyp/lib/rebuild.js | 21 + .../node-pre-gyp/lib/reinstall.js | 20 + .../node_modules/node-pre-gyp/lib/reveal.js | 33 + .../node-pre-gyp/lib/testbinary.js | 81 + .../node-pre-gyp/lib/testpackage.js | 55 + .../node-pre-gyp/lib/unpublish.js | 43 + .../node-pre-gyp/lib/util/abi_crosswalk.json | 1830 ++++ .../node-pre-gyp/lib/util/compile.js | 87 + .../node-pre-gyp/lib/util/handle_gyp_opts.js | 103 + .../node-pre-gyp/lib/util/napi.js | 204 + .../lib/util/nw-pre-gyp/index.html | 26 + .../lib/util/nw-pre-gyp/package.json | 9 + .../node-pre-gyp/lib/util/s3_setup.js | 27 + .../node-pre-gyp/lib/util/versioning.js | 331 + .../node_modules/node-pre-gyp/package.json | 89 + .../fsevents/node_modules/nopt/.npmignore | 1 + .../fsevents/node_modules/nopt/.travis.yml | 8 + .../fsevents/node_modules/nopt/CHANGELOG.md | 58 + .../fsevents/node_modules/nopt/LICENSE | 15 + .../fsevents/node_modules/nopt/README.md | 213 + .../fsevents/node_modules/nopt/bin/nopt.js | 54 + .../node_modules/nopt/examples/my-program.js | 30 + .../fsevents/node_modules/nopt/lib/nopt.js | 436 + .../fsevents/node_modules/nopt/package.json | 61 + .../fsevents/node_modules/nopt/test/basic.js | 303 + .../fsevents/node_modules/npm-bundled/LICENSE | 15 + .../node_modules/npm-bundled/README.md | 48 + .../node_modules/npm-bundled/index.js | 241 + .../node_modules/npm-bundled/package.json | 63 + .../node_modules/npm-packlist/LICENSE | 15 + .../node_modules/npm-packlist/README.md | 68 + .../node_modules/npm-packlist/index.js | 250 + .../node_modules/npm-packlist/package.json | 69 + .../fsevents/node_modules/npmlog/CHANGELOG.md | 49 + .../fsevents/node_modules/npmlog/LICENSE | 15 + .../fsevents/node_modules/npmlog/README.md | 216 + .../fsevents/node_modules/npmlog/log.js | 309 + 
.../fsevents/node_modules/npmlog/package.json | 64 + .../node_modules/number-is-nan/index.js | 4 + .../node_modules/number-is-nan/license | 21 + .../node_modules/number-is-nan/package.json | 70 + .../node_modules/number-is-nan/readme.md | 28 + .../node_modules/object-assign/index.js | 90 + .../node_modules/object-assign/license | 21 + .../node_modules/object-assign/package.json | 77 + .../node_modules/object-assign/readme.md | 61 + .../fsevents/node_modules/once/LICENSE | 15 + .../fsevents/node_modules/once/README.md | 79 + .../fsevents/node_modules/once/once.js | 42 + .../fsevents/node_modules/once/package.json | 70 + .../fsevents/node_modules/os-homedir/index.js | 24 + .../fsevents/node_modules/os-homedir/license | 21 + .../node_modules/os-homedir/package.json | 76 + .../node_modules/os-homedir/readme.md | 31 + .../fsevents/node_modules/os-tmpdir/index.js | 25 + .../fsevents/node_modules/os-tmpdir/license | 21 + .../node_modules/os-tmpdir/package.json | 76 + .../fsevents/node_modules/os-tmpdir/readme.md | 32 + .../fsevents/node_modules/osenv/LICENSE | 15 + .../fsevents/node_modules/osenv/README.md | 63 + .../fsevents/node_modules/osenv/osenv.js | 72 + .../fsevents/node_modules/osenv/package.json | 76 + .../node_modules/path-is-absolute/index.js | 20 + .../node_modules/path-is-absolute/license | 21 + .../path-is-absolute/package.json | 78 + .../node_modules/path-is-absolute/readme.md | 59 + .../process-nextick-args/index.js | 44 + .../process-nextick-args/license.md | 19 + .../process-nextick-args/package.json | 53 + .../process-nextick-args/readme.md | 18 + .../fsevents/node_modules/rc/LICENSE.APACHE2 | 15 + .../fsevents/node_modules/rc/LICENSE.BSD | 26 + .../fsevents/node_modules/rc/LICENSE.MIT | 24 + .../fsevents/node_modules/rc/README.md | 227 + .../fsevents/node_modules/rc/browser.js | 7 + node_modules/fsevents/node_modules/rc/cli.js | 4 + .../fsevents/node_modules/rc/index.js | 53 + .../fsevents/node_modules/rc/lib/utils.js | 104 + .../rc/node_modules/minimist/.travis.yml | 8 + .../rc/node_modules/minimist/LICENSE | 18 + .../rc/node_modules/minimist/example/parse.js | 2 + .../rc/node_modules/minimist/index.js | 236 + .../rc/node_modules/minimist/package.json | 76 + .../rc/node_modules/minimist/readme.markdown | 91 + .../rc/node_modules/minimist/test/all_bool.js | 32 + .../rc/node_modules/minimist/test/bool.js | 166 + .../rc/node_modules/minimist/test/dash.js | 31 + .../minimist/test/default_bool.js | 35 + .../rc/node_modules/minimist/test/dotted.js | 22 + .../rc/node_modules/minimist/test/kv_short.js | 16 + .../rc/node_modules/minimist/test/long.js | 31 + .../rc/node_modules/minimist/test/num.js | 36 + .../rc/node_modules/minimist/test/parse.js | 197 + .../minimist/test/parse_modified.js | 9 + .../rc/node_modules/minimist/test/short.js | 67 + .../node_modules/minimist/test/stop_early.js | 15 + .../rc/node_modules/minimist/test/unknown.js | 102 + .../node_modules/minimist/test/whitespace.js | 8 + .../fsevents/node_modules/rc/package.json | 67 + .../fsevents/node_modules/rc/test/ini.js | 16 + .../node_modules/rc/test/nested-env-vars.js | 50 + .../fsevents/node_modules/rc/test/test.js | 59 + .../node_modules/readable-stream/.travis.yml | 55 + .../readable-stream/CONTRIBUTING.md | 38 + .../readable-stream/GOVERNANCE.md | 136 + .../node_modules/readable-stream/LICENSE | 47 + .../node_modules/readable-stream/README.md | 58 + .../doc/wg-meetings/2015-01-30.md | 60 + .../readable-stream/duplex-browser.js | 1 + .../node_modules/readable-stream/duplex.js | 1 + 
.../readable-stream/lib/_stream_duplex.js | 131 + .../lib/_stream_passthrough.js | 47 + .../readable-stream/lib/_stream_readable.js | 1019 +++ .../readable-stream/lib/_stream_transform.js | 214 + .../readable-stream/lib/_stream_writable.js | 687 ++ .../lib/internal/streams/BufferList.js | 79 + .../lib/internal/streams/destroy.js | 74 + .../lib/internal/streams/stream-browser.js | 1 + .../lib/internal/streams/stream.js | 1 + .../node_modules/readable-stream/package.json | 84 + .../readable-stream/passthrough.js | 1 + .../readable-stream/readable-browser.js | 7 + .../node_modules/readable-stream/readable.js | 19 + .../node_modules/readable-stream/transform.js | 1 + .../readable-stream/writable-browser.js | 1 + .../node_modules/readable-stream/writable.js | 8 + .../fsevents/node_modules/rimraf/LICENSE | 15 + .../fsevents/node_modules/rimraf/README.md | 101 + .../fsevents/node_modules/rimraf/bin.js | 50 + .../fsevents/node_modules/rimraf/package.json | 70 + .../fsevents/node_modules/rimraf/rimraf.js | 364 + .../fsevents/node_modules/safe-buffer/LICENSE | 21 + .../node_modules/safe-buffer/README.md | 584 ++ .../node_modules/safe-buffer/index.d.ts | 187 + .../node_modules/safe-buffer/index.js | 62 + .../node_modules/safe-buffer/package.json | 68 + .../node_modules/safer-buffer/LICENSE | 21 + .../safer-buffer/Porting-Buffer.md | 268 + .../node_modules/safer-buffer/Readme.md | 156 + .../node_modules/safer-buffer/dangerous.js | 58 + .../node_modules/safer-buffer/package.json | 63 + .../node_modules/safer-buffer/safer.js | 77 + .../node_modules/safer-buffer/tests.js | 406 + .../fsevents/node_modules/sax/LICENSE | 41 + .../fsevents/node_modules/sax/README.md | 225 + .../fsevents/node_modules/sax/lib/sax.js | 1565 ++++ .../fsevents/node_modules/sax/package.json | 64 + .../fsevents/node_modules/semver/CHANGELOG.md | 39 + .../fsevents/node_modules/semver/LICENSE | 15 + .../fsevents/node_modules/semver/README.md | 411 + .../fsevents/node_modules/semver/bin/semver | 160 + .../fsevents/node_modules/semver/package.json | 63 + .../fsevents/node_modules/semver/range.bnf | 16 + .../fsevents/node_modules/semver/semver.js | 1483 ++++ .../node_modules/set-blocking/CHANGELOG.md | 26 + .../node_modules/set-blocking/LICENSE.txt | 14 + .../node_modules/set-blocking/README.md | 31 + .../node_modules/set-blocking/index.js | 7 + .../node_modules/set-blocking/package.json | 73 + .../node_modules/signal-exit/CHANGELOG.md | 27 + .../node_modules/signal-exit/LICENSE.txt | 16 + .../node_modules/signal-exit/README.md | 40 + .../node_modules/signal-exit/index.js | 157 + .../node_modules/signal-exit/package.json | 69 + .../node_modules/signal-exit/signals.js | 53 + .../node_modules/string-width/index.js | 37 + .../node_modules/string-width/license | 21 + .../node_modules/string-width/package.json | 92 + .../node_modules/string-width/readme.md | 42 + .../node_modules/string_decoder/.travis.yml | 50 + .../node_modules/string_decoder/LICENSE | 48 + .../node_modules/string_decoder/README.md | 47 + .../string_decoder/lib/string_decoder.js | 296 + .../node_modules/string_decoder/package.json | 62 + .../fsevents/node_modules/strip-ansi/index.js | 6 + .../fsevents/node_modules/strip-ansi/license | 21 + .../node_modules/strip-ansi/package.json | 105 + .../node_modules/strip-ansi/readme.md | 33 + .../node_modules/strip-json-comments/index.js | 70 + .../node_modules/strip-json-comments/license | 21 + .../strip-json-comments/package.json | 77 + .../strip-json-comments/readme.md | 64 + .../fsevents/node_modules/tar/LICENSE | 15 + 
.../fsevents/node_modules/tar/README.md | 954 ++ .../fsevents/node_modules/tar/index.js | 18 + .../fsevents/node_modules/tar/lib/buffer.js | 11 + .../fsevents/node_modules/tar/lib/create.js | 105 + .../fsevents/node_modules/tar/lib/extract.js | 112 + .../fsevents/node_modules/tar/lib/header.js | 289 + .../node_modules/tar/lib/high-level-opt.js | 29 + .../node_modules/tar/lib/large-numbers.js | 92 + .../fsevents/node_modules/tar/lib/list.js | 130 + .../fsevents/node_modules/tar/lib/mkdir.js | 206 + .../fsevents/node_modules/tar/lib/mode-fix.js | 14 + .../fsevents/node_modules/tar/lib/pack.js | 404 + .../fsevents/node_modules/tar/lib/parse.js | 423 + .../fsevents/node_modules/tar/lib/pax.js | 146 + .../node_modules/tar/lib/read-entry.js | 94 + .../fsevents/node_modules/tar/lib/replace.js | 220 + .../fsevents/node_modules/tar/lib/types.js | 44 + .../fsevents/node_modules/tar/lib/unpack.js | 621 ++ .../fsevents/node_modules/tar/lib/update.js | 36 + .../node_modules/tar/lib/warn-mixin.js | 14 + .../fsevents/node_modules/tar/lib/winchars.js | 23 + .../node_modules/tar/lib/write-entry.js | 422 + .../fsevents/node_modules/tar/package.json | 81 + .../node_modules/util-deprecate/History.md | 16 + .../node_modules/util-deprecate/LICENSE | 24 + .../node_modules/util-deprecate/README.md | 53 + .../node_modules/util-deprecate/browser.js | 67 + .../node_modules/util-deprecate/node.js | 6 + .../node_modules/util-deprecate/package.json | 59 + .../fsevents/node_modules/wide-align/LICENSE | 14 + .../node_modules/wide-align/README.md | 47 + .../fsevents/node_modules/wide-align/align.js | 65 + .../node_modules/wide-align/package.json | 69 + .../fsevents/node_modules/wrappy/LICENSE | 15 + .../fsevents/node_modules/wrappy/README.md | 36 + .../fsevents/node_modules/wrappy/package.json | 62 + .../fsevents/node_modules/wrappy/wrappy.js | 33 + .../fsevents/node_modules/yallist/LICENSE | 15 + .../fsevents/node_modules/yallist/README.md | 204 + .../fsevents/node_modules/yallist/iterator.js | 8 + .../node_modules/yallist/package.json | 66 + .../fsevents/node_modules/yallist/yallist.js | 376 + node_modules/fsevents/package.json | 46 + node_modules/fsevents/src/async.cc | 43 + node_modules/fsevents/src/constants.cc | 110 + node_modules/fsevents/src/methods.cc | 44 + node_modules/fsevents/src/storage.cc | 27 + node_modules/fsevents/src/thread.cc | 71 + node_modules/gauge/CHANGELOG.md | 160 + node_modules/gauge/LICENSE | 13 + node_modules/gauge/README.md | 399 + node_modules/gauge/base-theme.js | 14 + node_modules/gauge/error.js | 24 + node_modules/gauge/has-color.js | 12 + node_modules/gauge/index.js | 233 + .../is-fullwidth-code-point/index.js | 46 + .../is-fullwidth-code-point/license | 21 + .../is-fullwidth-code-point/package.json | 45 + .../is-fullwidth-code-point/readme.md | 39 + .../gauge/node_modules/string-width/index.js | 37 + .../gauge/node_modules/string-width/license | 21 + .../node_modules/string-width/package.json | 56 + .../gauge/node_modules/string-width/readme.md | 42 + node_modules/gauge/package.json | 63 + node_modules/gauge/plumbing.js | 48 + node_modules/gauge/process.js | 3 + node_modules/gauge/progress-bar.js | 35 + node_modules/gauge/render-template.js | 181 + node_modules/gauge/set-immediate.js | 7 + node_modules/gauge/set-interval.js | 3 + node_modules/gauge/spin.js | 5 + node_modules/gauge/template-item.js | 73 + node_modules/gauge/theme-set.js | 115 + node_modules/gauge/themes.js | 54 + node_modules/gauge/wide-truncate.js | 25 + node_modules/get-stream/buffer-stream.js | 51 + 
node_modules/get-stream/index.js | 51 + node_modules/get-stream/license | 21 + node_modules/get-stream/package.json | 48 + node_modules/get-stream/readme.md | 117 + node_modules/get-value/LICENSE | 21 + node_modules/get-value/index.js | 50 + node_modules/get-value/package.json | 79 + node_modules/glob-parent/LICENSE | 15 + node_modules/glob-parent/README.md | 109 + node_modules/glob-parent/index.js | 24 + .../glob-parent/node_modules/is-glob/LICENSE | 21 + .../node_modules/is-glob/README.md | 142 + .../glob-parent/node_modules/is-glob/index.js | 25 + .../node_modules/is-glob/package.json | 80 + node_modules/glob-parent/package.json | 42 + node_modules/glob/LICENSE | 21 + node_modules/glob/README.md | 373 + node_modules/glob/changelog.md | 67 + node_modules/glob/common.js | 240 + node_modules/glob/glob.js | 790 ++ node_modules/glob/package.json | 43 + node_modules/glob/sync.js | 486 + node_modules/global-dirs/index.js | 90 + node_modules/global-dirs/license | 9 + node_modules/global-dirs/package.json | 52 + node_modules/global-dirs/readme.md | 69 + node_modules/got/index.js | 364 + node_modules/got/license | 21 + .../got/node_modules/safe-buffer/LICENSE | 21 + .../got/node_modules/safe-buffer/README.md | 586 ++ .../got/node_modules/safe-buffer/index.d.ts | 187 + .../got/node_modules/safe-buffer/index.js | 64 + .../got/node_modules/safe-buffer/package.json | 37 + node_modules/got/package.json | 78 + node_modules/got/readme.md | 335 + node_modules/graceful-fs/LICENSE | 15 + node_modules/graceful-fs/README.md | 133 + node_modules/graceful-fs/clone.js | 19 + node_modules/graceful-fs/graceful-fs.js | 279 + node_modules/graceful-fs/legacy-streams.js | 118 + node_modules/graceful-fs/package.json | 50 + node_modules/graceful-fs/polyfills.js | 336 + node_modules/has-flag/index.js | 8 + node_modules/has-flag/license | 9 + node_modules/has-flag/package.json | 44 + node_modules/has-flag/readme.md | 70 + node_modules/has-unicode/LICENSE | 14 + node_modules/has-unicode/README.md | 43 + node_modules/has-unicode/index.js | 16 + node_modules/has-unicode/package.json | 30 + node_modules/has-value/LICENSE | 21 + node_modules/has-value/README.md | 149 + node_modules/has-value/index.js | 16 + node_modules/has-value/package.json | 83 + node_modules/has-values/LICENSE | 21 + node_modules/has-values/README.md | 129 + node_modules/has-values/index.js | 60 + .../has-values/node_modules/kind-of/LICENSE | 21 + .../has-values/node_modules/kind-of/README.md | 267 + .../has-values/node_modules/kind-of/index.js | 119 + .../node_modules/kind-of/package.json | 90 + node_modules/has-values/package.json | 82 + node_modules/http-errors/HISTORY.md | 149 + node_modules/http-errors/LICENSE | 23 + node_modules/http-errors/README.md | 163 + node_modules/http-errors/index.js | 266 + .../http-errors/node_modules/inherits/LICENSE | 16 + .../node_modules/inherits/README.md | 42 + .../node_modules/inherits/inherits.js | 7 + .../node_modules/inherits/inherits_browser.js | 23 + .../node_modules/inherits/package.json | 29 + node_modules/http-errors/package.json | 49 + node_modules/iconv-lite/Changelog.md | 162 + node_modules/iconv-lite/LICENSE | 21 + node_modules/iconv-lite/README.md | 156 + .../iconv-lite/encodings/dbcs-codec.js | 555 ++ .../iconv-lite/encodings/dbcs-data.js | 176 + node_modules/iconv-lite/encodings/index.js | 22 + node_modules/iconv-lite/encodings/internal.js | 188 + .../iconv-lite/encodings/sbcs-codec.js | 72 + .../encodings/sbcs-data-generated.js | 451 + .../iconv-lite/encodings/sbcs-data.js | 174 + 
.../encodings/tables/big5-added.json | 122 + .../iconv-lite/encodings/tables/cp936.json | 264 + .../iconv-lite/encodings/tables/cp949.json | 273 + .../iconv-lite/encodings/tables/cp950.json | 177 + .../iconv-lite/encodings/tables/eucjp.json | 182 + .../encodings/tables/gb18030-ranges.json | 1 + .../encodings/tables/gbk-added.json | 55 + .../iconv-lite/encodings/tables/shiftjis.json | 125 + node_modules/iconv-lite/encodings/utf16.js | 177 + node_modules/iconv-lite/encodings/utf7.js | 290 + node_modules/iconv-lite/lib/bom-handling.js | 52 + node_modules/iconv-lite/lib/extend-node.js | 217 + node_modules/iconv-lite/lib/index.d.ts | 24 + node_modules/iconv-lite/lib/index.js | 153 + node_modules/iconv-lite/lib/streams.js | 121 + node_modules/iconv-lite/package.json | 46 + node_modules/ignore-by-default/LICENSE | 14 + node_modules/ignore-by-default/README.md | 26 + node_modules/ignore-by-default/index.js | 12 + node_modules/ignore-by-default/package.json | 34 + node_modules/ignore-walk/LICENSE | 15 + node_modules/ignore-walk/README.md | 60 + node_modules/ignore-walk/index.js | 265 + node_modules/ignore-walk/package.json | 38 + node_modules/import-lazy/index.js | 53 + node_modules/import-lazy/license | 21 + node_modules/import-lazy/package.json | 44 + node_modules/import-lazy/readme.md | 64 + node_modules/imurmurhash/README.md | 122 + node_modules/imurmurhash/imurmurhash.js | 138 + node_modules/imurmurhash/imurmurhash.min.js | 12 + node_modules/imurmurhash/package.json | 40 + node_modules/inflight/LICENSE | 15 + node_modules/inflight/README.md | 37 + node_modules/inflight/inflight.js | 54 + node_modules/inflight/package.json | 29 + node_modules/inherits/LICENSE | 16 + node_modules/inherits/README.md | 42 + node_modules/inherits/inherits.js | 9 + node_modules/inherits/inherits_browser.js | 27 + node_modules/inherits/package.json | 29 + node_modules/ini/LICENSE | 15 + node_modules/ini/README.md | 102 + node_modules/ini/ini.js | 194 + node_modules/ini/package.json | 30 + node_modules/ipaddr.js/LICENSE | 19 + node_modules/ipaddr.js/README.md | 233 + node_modules/ipaddr.js/ipaddr.min.js | 1 + node_modules/ipaddr.js/lib/ipaddr.js | 673 ++ node_modules/ipaddr.js/lib/ipaddr.js.d.ts | 71 + node_modules/ipaddr.js/package.json | 34 + node_modules/is-accessor-descriptor/LICENSE | 21 + node_modules/is-accessor-descriptor/README.md | 144 + node_modules/is-accessor-descriptor/index.js | 69 + .../node_modules/kind-of/CHANGELOG.md | 157 + .../node_modules/kind-of/LICENSE | 21 + .../node_modules/kind-of/README.md | 365 + .../node_modules/kind-of/index.js | 129 + .../node_modules/kind-of/package.json | 88 + .../is-accessor-descriptor/package.json | 73 + node_modules/is-binary-path/index.js | 12 + node_modules/is-binary-path/license | 21 + node_modules/is-binary-path/package.json | 39 + node_modules/is-binary-path/readme.md | 34 + node_modules/is-buffer/LICENSE | 21 + node_modules/is-buffer/README.md | 53 + node_modules/is-buffer/index.js | 21 + node_modules/is-buffer/package.json | 51 + node_modules/is-buffer/test/basic.js | 24 + node_modules/is-ci/LICENSE | 21 + node_modules/is-ci/README.md | 50 + node_modules/is-ci/bin.js | 4 + node_modules/is-ci/index.js | 3 + node_modules/is-ci/package.json | 38 + node_modules/is-data-descriptor/LICENSE | 21 + node_modules/is-data-descriptor/README.md | 161 + node_modules/is-data-descriptor/index.js | 49 + .../node_modules/kind-of/CHANGELOG.md | 157 + .../node_modules/kind-of/LICENSE | 21 + .../node_modules/kind-of/README.md | 365 + .../node_modules/kind-of/index.js | 129 + 
.../node_modules/kind-of/package.json | 88 + node_modules/is-data-descriptor/package.json | 72 + node_modules/is-descriptor/LICENSE | 21 + node_modules/is-descriptor/README.md | 193 + node_modules/is-descriptor/index.js | 22 + .../node_modules/kind-of/CHANGELOG.md | 157 + .../node_modules/kind-of/LICENSE | 21 + .../node_modules/kind-of/README.md | 365 + .../node_modules/kind-of/index.js | 129 + .../node_modules/kind-of/package.json | 88 + node_modules/is-descriptor/package.json | 75 + node_modules/is-extendable/LICENSE | 21 + node_modules/is-extendable/README.md | 72 + node_modules/is-extendable/index.js | 13 + node_modules/is-extendable/package.json | 51 + node_modules/is-extglob/LICENSE | 21 + node_modules/is-extglob/README.md | 107 + node_modules/is-extglob/index.js | 20 + node_modules/is-extglob/package.json | 69 + node_modules/is-fullwidth-code-point/index.js | 46 + node_modules/is-fullwidth-code-point/license | 21 + .../is-fullwidth-code-point/package.json | 45 + .../is-fullwidth-code-point/readme.md | 39 + node_modules/is-glob/LICENSE | 21 + node_modules/is-glob/README.md | 206 + node_modules/is-glob/index.js | 48 + node_modules/is-glob/package.json | 81 + node_modules/is-installed-globally/index.js | 5 + node_modules/is-installed-globally/license | 9 + .../is-installed-globally/package.json | 48 + node_modules/is-installed-globally/readme.md | 39 + node_modules/is-npm/index.js | 4 + node_modules/is-npm/package.json | 32 + node_modules/is-npm/readme.md | 30 + node_modules/is-number/LICENSE | 21 + node_modules/is-number/README.md | 115 + node_modules/is-number/index.js | 22 + node_modules/is-number/package.json | 83 + node_modules/is-obj/index.js | 5 + node_modules/is-obj/license | 21 + node_modules/is-obj/package.json | 33 + node_modules/is-obj/readme.md | 34 + node_modules/is-path-inside/index.js | 14 + node_modules/is-path-inside/license | 21 + node_modules/is-path-inside/package.json | 37 + node_modules/is-path-inside/readme.md | 34 + node_modules/is-plain-object/LICENSE | 21 + node_modules/is-plain-object/README.md | 104 + node_modules/is-plain-object/index.d.ts | 5 + node_modules/is-plain-object/index.js | 37 + node_modules/is-plain-object/package.json | 79 + node_modules/is-redirect/index.js | 14 + node_modules/is-redirect/license | 21 + node_modules/is-redirect/package.json | 35 + node_modules/is-redirect/readme.md | 28 + node_modules/is-retry-allowed/index.js | 60 + node_modules/is-retry-allowed/license | 21 + node_modules/is-retry-allowed/package.json | 29 + node_modules/is-retry-allowed/readme.md | 42 + node_modules/is-stream/index.js | 21 + node_modules/is-stream/license | 21 + node_modules/is-stream/package.json | 38 + node_modules/is-stream/readme.md | 42 + node_modules/is-windows/LICENSE | 21 + node_modules/is-windows/README.md | 95 + node_modules/is-windows/index.js | 27 + node_modules/is-windows/package.json | 71 + node_modules/isarray/.npmignore | 1 + node_modules/isarray/.travis.yml | 4 + node_modules/isarray/Makefile | 6 + node_modules/isarray/README.md | 60 + node_modules/isarray/component.json | 19 + node_modules/isarray/index.js | 5 + node_modules/isarray/package.json | 45 + node_modules/isarray/test.js | 20 + node_modules/isexe/.npmignore | 2 + node_modules/isexe/LICENSE | 15 + node_modules/isexe/README.md | 51 + node_modules/isexe/index.js | 57 + node_modules/isexe/mode.js | 41 + node_modules/isexe/package.json | 31 + node_modules/isexe/test/basic.js | 221 + node_modules/isexe/windows.js | 42 + node_modules/isobject/LICENSE | 21 + 
node_modules/isobject/README.md | 122 + node_modules/isobject/index.d.ts | 5 + node_modules/isobject/index.js | 12 + node_modules/isobject/package.json | 74 + node_modules/kind-of/LICENSE | 21 + node_modules/kind-of/README.md | 261 + node_modules/kind-of/index.js | 116 + node_modules/kind-of/package.json | 90 + node_modules/latest-version/index.js | 4 + node_modules/latest-version/license | 21 + node_modules/latest-version/package.json | 39 + node_modules/latest-version/readme.md | 40 + node_modules/lines-and-columns/LICENSE | 21 + node_modules/lines-and-columns/README.md | 29 + .../lines-and-columns/dist/index.d.ts | 12 + node_modules/lines-and-columns/dist/index.js | 58 + node_modules/lines-and-columns/dist/index.mjs | 56 + node_modules/lines-and-columns/package.json | 45 + node_modules/lowercase-keys/index.js | 11 + node_modules/lowercase-keys/license | 21 + node_modules/lowercase-keys/package.json | 35 + node_modules/lowercase-keys/readme.md | 33 + node_modules/lru-cache/LICENSE | 15 + node_modules/lru-cache/README.md | 158 + node_modules/lru-cache/index.js | 468 + .../lru-cache/node_modules/yallist/LICENSE | 15 + .../lru-cache/node_modules/yallist/README.md | 204 + .../node_modules/yallist/iterator.js | 7 + .../node_modules/yallist/package.json | 29 + .../lru-cache/node_modules/yallist/yallist.js | 370 + node_modules/lru-cache/package.json | 36 + node_modules/make-dir/index.js | 85 + node_modules/make-dir/license | 9 + node_modules/make-dir/package.json | 54 + node_modules/make-dir/readme.md | 116 + node_modules/map-cache/LICENSE | 21 + node_modules/map-cache/README.md | 145 + node_modules/map-cache/index.js | 100 + node_modules/map-cache/package.json | 59 + node_modules/map-visit/LICENSE | 21 + node_modules/map-visit/README.md | 155 + node_modules/map-visit/index.js | 37 + node_modules/map-visit/package.json | 74 + node_modules/media-typer/HISTORY.md | 22 + node_modules/media-typer/LICENSE | 22 + node_modules/media-typer/README.md | 81 + node_modules/media-typer/index.js | 270 + node_modules/media-typer/package.json | 26 + node_modules/merge-descriptors/HISTORY.md | 21 + node_modules/merge-descriptors/LICENSE | 23 + node_modules/merge-descriptors/README.md | 48 + node_modules/merge-descriptors/index.js | 60 + node_modules/merge-descriptors/package.json | 32 + node_modules/methods/HISTORY.md | 29 + node_modules/methods/LICENSE | 24 + node_modules/methods/README.md | 51 + node_modules/methods/index.js | 69 + node_modules/methods/package.json | 36 + node_modules/micromatch/CHANGELOG.md | 37 + node_modules/micromatch/LICENSE | 21 + node_modules/micromatch/README.md | 1150 +++ node_modules/micromatch/index.js | 877 ++ node_modules/micromatch/lib/cache.js | 1 + node_modules/micromatch/lib/compilers.js | 77 + node_modules/micromatch/lib/parsers.js | 83 + node_modules/micromatch/lib/utils.js | 309 + .../node_modules/define-property/CHANGELOG.md | 82 + .../node_modules/define-property/LICENSE | 21 + .../node_modules/define-property/README.md | 117 + .../node_modules/define-property/index.js | 38 + .../node_modules/define-property/package.json | 67 + .../node_modules/extend-shallow/LICENSE | 21 + .../node_modules/extend-shallow/README.md | 97 + .../node_modules/extend-shallow/index.js | 60 + .../node_modules/extend-shallow/package.json | 83 + .../node_modules/is-extendable/LICENSE | 21 + .../node_modules/is-extendable/README.md | 88 + .../node_modules/is-extendable/index.d.ts | 5 + .../node_modules/is-extendable/index.js | 14 + .../node_modules/is-extendable/package.json | 67 + 
.../node_modules/kind-of/CHANGELOG.md | 157 + .../micromatch/node_modules/kind-of/LICENSE | 21 + .../micromatch/node_modules/kind-of/README.md | 365 + .../micromatch/node_modules/kind-of/index.js | 129 + .../node_modules/kind-of/package.json | 88 + node_modules/micromatch/package.json | 147 + node_modules/mime-db/HISTORY.md | 417 + node_modules/mime-db/LICENSE | 22 + node_modules/mime-db/README.md | 94 + node_modules/mime-db/db.json | 7834 +++++++++++++++++ node_modules/mime-db/index.js | 11 + node_modules/mime-db/package.json | 58 + node_modules/mime-types/HISTORY.md | 308 + node_modules/mime-types/LICENSE | 23 + node_modules/mime-types/README.md | 113 + node_modules/mime-types/index.js | 188 + node_modules/mime-types/package.json | 43 + node_modules/mime/.npmignore | 0 node_modules/mime/CHANGELOG.md | 164 + node_modules/mime/LICENSE | 21 + node_modules/mime/README.md | 90 + node_modules/mime/cli.js | 8 + node_modules/mime/mime.js | 108 + node_modules/mime/package.json | 44 + node_modules/mime/src/build.js | 53 + node_modules/mime/src/test.js | 60 + node_modules/mime/types.json | 1 + node_modules/minimatch/LICENSE | 15 + node_modules/minimatch/README.md | 209 + node_modules/minimatch/minimatch.js | 923 ++ node_modules/minimatch/package.json | 30 + node_modules/minimist/.travis.yml | 4 + node_modules/minimist/LICENSE | 18 + node_modules/minimist/example/parse.js | 2 + node_modules/minimist/index.js | 187 + node_modules/minimist/package.json | 40 + node_modules/minimist/readme.markdown | 73 + node_modules/minimist/test/dash.js | 24 + node_modules/minimist/test/default_bool.js | 20 + node_modules/minimist/test/dotted.js | 16 + node_modules/minimist/test/long.js | 31 + node_modules/minimist/test/parse.js | 318 + node_modules/minimist/test/parse_modified.js | 9 + node_modules/minimist/test/short.js | 67 + node_modules/minimist/test/whitespace.js | 8 + node_modules/minipass/LICENSE | 15 + node_modules/minipass/README.md | 124 + node_modules/minipass/index.js | 375 + .../minipass/node_modules/safe-buffer/LICENSE | 21 + .../node_modules/safe-buffer/README.md | 586 ++ .../node_modules/safe-buffer/index.d.ts | 187 + .../node_modules/safe-buffer/index.js | 64 + .../node_modules/safe-buffer/package.json | 37 + node_modules/minipass/package.json | 34 + node_modules/minizlib/LICENSE | 26 + node_modules/minizlib/README.md | 44 + node_modules/minizlib/constants.js | 46 + node_modules/minizlib/index.js | 335 + node_modules/minizlib/package.json | 38 + node_modules/mixin-deep/LICENSE | 21 + node_modules/mixin-deep/README.md | 80 + node_modules/mixin-deep/index.js | 64 + .../node_modules/is-extendable/LICENSE | 21 + .../node_modules/is-extendable/README.md | 88 + .../node_modules/is-extendable/index.d.ts | 5 + .../node_modules/is-extendable/index.js | 14 + .../node_modules/is-extendable/package.json | 67 + node_modules/mixin-deep/package.json | 65 + node_modules/mkdirp/.travis.yml | 8 + node_modules/mkdirp/LICENSE | 21 + node_modules/mkdirp/bin/cmd.js | 33 + node_modules/mkdirp/bin/usage.txt | 12 + node_modules/mkdirp/examples/pow.js | 6 + node_modules/mkdirp/index.js | 98 + node_modules/mkdirp/package.json | 27 + node_modules/mkdirp/readme.markdown | 100 + node_modules/mkdirp/test/chmod.js | 41 + node_modules/mkdirp/test/clobber.js | 38 + node_modules/mkdirp/test/mkdirp.js | 28 + node_modules/mkdirp/test/opts_fs.js | 29 + node_modules/mkdirp/test/opts_fs_sync.js | 27 + node_modules/mkdirp/test/perm.js | 32 + node_modules/mkdirp/test/perm_sync.js | 36 + node_modules/mkdirp/test/race.js | 37 + 
node_modules/mkdirp/test/rel.js | 32 + node_modules/mkdirp/test/return.js | 25 + node_modules/mkdirp/test/return_sync.js | 24 + node_modules/mkdirp/test/root.js | 19 + node_modules/mkdirp/test/sync.js | 32 + node_modules/mkdirp/test/umask.js | 28 + node_modules/mkdirp/test/umask_sync.js | 32 + node_modules/ms/index.js | 152 + node_modules/ms/license.md | 21 + node_modules/ms/package.json | 37 + node_modules/ms/readme.md | 51 + node_modules/mz/HISTORY.md | 66 + node_modules/mz/LICENSE | 22 + node_modules/mz/README.md | 106 + node_modules/mz/child_process.js | 8 + node_modules/mz/crypto.js | 9 + node_modules/mz/dns.js | 16 + node_modules/mz/fs.js | 62 + node_modules/mz/index.js | 8 + node_modules/mz/package.json | 44 + node_modules/mz/readline.js | 64 + node_modules/mz/zlib.js | 13 + node_modules/nan/CHANGELOG.md | 529 ++ node_modules/nan/LICENSE.md | 13 + node_modules/nan/README.md | 456 + node_modules/nan/doc/asyncworker.md | 146 + node_modules/nan/doc/buffers.md | 54 + node_modules/nan/doc/callback.md | 76 + node_modules/nan/doc/converters.md | 41 + node_modules/nan/doc/errors.md | 226 + node_modules/nan/doc/json.md | 62 + node_modules/nan/doc/maybe_types.md | 583 ++ node_modules/nan/doc/methods.md | 661 ++ node_modules/nan/doc/new.md | 147 + node_modules/nan/doc/node_misc.md | 123 + node_modules/nan/doc/object_wrappers.md | 263 + node_modules/nan/doc/persistent.md | 296 + node_modules/nan/doc/scopes.md | 73 + node_modules/nan/doc/script.md | 38 + node_modules/nan/doc/string_bytes.md | 62 + node_modules/nan/doc/v8_internals.md | 199 + node_modules/nan/doc/v8_misc.md | 85 + node_modules/nan/include_dirs.js | 1 + node_modules/nan/nan.h | 2892 ++++++ node_modules/nan/nan_callbacks.h | 88 + node_modules/nan/nan_callbacks_12_inl.h | 514 ++ node_modules/nan/nan_callbacks_pre_12_inl.h | 520 ++ node_modules/nan/nan_converters.h | 72 + node_modules/nan/nan_converters_43_inl.h | 68 + node_modules/nan/nan_converters_pre_43_inl.h | 42 + .../nan/nan_define_own_property_helper.h | 29 + node_modules/nan/nan_implementation_12_inl.h | 430 + .../nan/nan_implementation_pre_12_inl.h | 263 + node_modules/nan/nan_json.h | 166 + node_modules/nan/nan_maybe_43_inl.h | 356 + node_modules/nan/nan_maybe_pre_43_inl.h | 268 + node_modules/nan/nan_new.h | 340 + node_modules/nan/nan_object_wrap.h | 156 + node_modules/nan/nan_persistent_12_inl.h | 132 + node_modules/nan/nan_persistent_pre_12_inl.h | 242 + node_modules/nan/nan_private.h | 73 + node_modules/nan/nan_string_bytes.h | 305 + node_modules/nan/nan_typedarray_contents.h | 90 + node_modules/nan/nan_weak.h | 437 + node_modules/nan/package.json | 37 + node_modules/nan/tools/1to2.js | 412 + node_modules/nan/tools/README.md | 14 + node_modules/nan/tools/package.json | 19 + node_modules/nanomatch/CHANGELOG.md | 57 + node_modules/nanomatch/LICENSE | 21 + node_modules/nanomatch/README.md | 1148 +++ node_modules/nanomatch/index.js | 838 ++ node_modules/nanomatch/lib/cache.js | 1 + node_modules/nanomatch/lib/compilers.js | 339 + node_modules/nanomatch/lib/parsers.js | 386 + node_modules/nanomatch/lib/utils.js | 379 + .../node_modules/define-property/CHANGELOG.md | 82 + .../node_modules/define-property/LICENSE | 21 + .../node_modules/define-property/README.md | 117 + .../node_modules/define-property/index.js | 38 + .../node_modules/define-property/package.json | 67 + .../node_modules/extend-shallow/LICENSE | 21 + .../node_modules/extend-shallow/README.md | 97 + .../node_modules/extend-shallow/index.js | 60 + .../node_modules/extend-shallow/package.json | 83 + 
.../node_modules/is-extendable/LICENSE | 21 + .../node_modules/is-extendable/README.md | 88 + .../node_modules/is-extendable/index.d.ts | 5 + .../node_modules/is-extendable/index.js | 14 + .../node_modules/is-extendable/package.json | 67 + .../node_modules/kind-of/CHANGELOG.md | 157 + .../nanomatch/node_modules/kind-of/LICENSE | 21 + .../nanomatch/node_modules/kind-of/README.md | 365 + .../nanomatch/node_modules/kind-of/index.js | 129 + .../node_modules/kind-of/package.json | 88 + node_modules/nanomatch/package.json | 134 + node_modules/needle/README.md | 593 ++ node_modules/needle/bin/needle | 40 + .../needle/examples/deflated-stream.js | 22 + node_modules/needle/examples/digest-auth.js | 16 + .../needle/examples/download-to-file.js | 18 + .../needle/examples/multipart-stream.js | 25 + node_modules/needle/examples/parsed-stream.js | 23 + .../needle/examples/parsed-stream2.js | 21 + node_modules/needle/examples/stream-events.js | 23 + .../needle/examples/stream-to-file.js | 14 + node_modules/needle/examples/upload-image.js | 51 + node_modules/needle/lib/auth.js | 110 + node_modules/needle/lib/cookies.js | 79 + node_modules/needle/lib/decoder.js | 53 + node_modules/needle/lib/multipart.js | 98 + node_modules/needle/lib/needle.js | 797 ++ node_modules/needle/lib/parsers.js | 120 + node_modules/needle/lib/querystring.js | 49 + node_modules/needle/license.txt | 19 + .../needle/node_modules/debug/CHANGELOG.md | 395 + .../needle/node_modules/debug/LICENSE | 19 + .../needle/node_modules/debug/README.md | 437 + .../needle/node_modules/debug/dist/debug.js | 886 ++ .../needle/node_modules/debug/node.js | 1 + .../needle/node_modules/debug/package.json | 51 + .../needle/node_modules/debug/src/browser.js | 180 + .../needle/node_modules/debug/src/common.js | 249 + .../needle/node_modules/debug/src/index.js | 12 + .../needle/node_modules/debug/src/node.js | 174 + node_modules/needle/node_modules/ms/index.js | 162 + .../needle/node_modules/ms/license.md | 21 + .../needle/node_modules/ms/package.json | 37 + node_modules/needle/node_modules/ms/readme.md | 60 + node_modules/needle/package.json | 70 + node_modules/needle/test/basic_auth_spec.js | 196 + node_modules/needle/test/compression_spec.js | 94 + node_modules/needle/test/cookies_spec.js | 305 + node_modules/needle/test/decoder_spec.js | 86 + node_modules/needle/test/errors_spec.js | 286 + node_modules/needle/test/headers_spec.js | 198 + node_modules/needle/test/helpers.js | 72 + node_modules/needle/test/long_string_spec.js | 34 + node_modules/needle/test/output_spec.js | 254 + node_modules/needle/test/parsing_spec.js | 494 ++ node_modules/needle/test/post_data_spec.js | 1021 +++ node_modules/needle/test/proxy_spec.js | 202 + node_modules/needle/test/querystring_spec.js | 128 + node_modules/needle/test/redirect_spec.js | 392 + .../needle/test/redirect_with_timeout.js | 45 + .../needle/test/request_stream_spec.js | 202 + .../needle/test/response_stream_spec.js | 139 + node_modules/needle/test/socket_pool_spec.js | 66 + node_modules/needle/test/url_spec.js | 155 + node_modules/needle/test/utils/formidable.js | 17 + node_modules/needle/test/utils/proxy.js | 62 + node_modules/needle/test/utils/test.js | 104 + node_modules/negotiator/HISTORY.md | 103 + node_modules/negotiator/LICENSE | 24 + node_modules/negotiator/README.md | 203 + node_modules/negotiator/index.js | 124 + node_modules/negotiator/lib/charset.js | 169 + node_modules/negotiator/lib/encoding.js | 184 + node_modules/negotiator/lib/language.js | 179 + node_modules/negotiator/lib/mediaType.js | 294 
+ node_modules/negotiator/package.json | 42 + node_modules/node-modules-regexp/index.js | 2 + node_modules/node-modules-regexp/license | 21 + node_modules/node-modules-regexp/package.json | 44 + node_modules/node-modules-regexp/readme.md | 32 + node_modules/node-pre-gyp/CHANGELOG.md | 432 + node_modules/node-pre-gyp/LICENSE | 27 + node_modules/node-pre-gyp/README.md | 693 ++ node_modules/node-pre-gyp/appveyor.yml | 30 + node_modules/node-pre-gyp/bin/node-pre-gyp | 134 + .../node-pre-gyp/bin/node-pre-gyp.cmd | 2 + node_modules/node-pre-gyp/contributing.md | 10 + node_modules/node-pre-gyp/lib/build.js | 51 + node_modules/node-pre-gyp/lib/clean.js | 32 + node_modules/node-pre-gyp/lib/configure.js | 52 + node_modules/node-pre-gyp/lib/info.js | 40 + node_modules/node-pre-gyp/lib/install.js | 255 + node_modules/node-pre-gyp/lib/node-pre-gyp.js | 203 + node_modules/node-pre-gyp/lib/package.js | 56 + node_modules/node-pre-gyp/lib/pre-binding.js | 30 + node_modules/node-pre-gyp/lib/publish.js | 79 + node_modules/node-pre-gyp/lib/rebuild.js | 21 + node_modules/node-pre-gyp/lib/reinstall.js | 20 + node_modules/node-pre-gyp/lib/reveal.js | 33 + node_modules/node-pre-gyp/lib/testbinary.js | 81 + node_modules/node-pre-gyp/lib/testpackage.js | 55 + node_modules/node-pre-gyp/lib/unpublish.js | 43 + .../node-pre-gyp/lib/util/abi_crosswalk.json | 1830 ++++ node_modules/node-pre-gyp/lib/util/compile.js | 87 + .../node-pre-gyp/lib/util/handle_gyp_opts.js | 103 + node_modules/node-pre-gyp/lib/util/napi.js | 204 + .../lib/util/nw-pre-gyp/index.html | 26 + .../lib/util/nw-pre-gyp/package.json | 9 + .../node-pre-gyp/lib/util/s3_setup.js | 27 + .../node-pre-gyp/lib/util/versioning.js | 331 + .../node_modules/.bin/detect-libc | 1 + .../node-pre-gyp/node_modules/.bin/mkdirp | 1 + .../node-pre-gyp/node_modules/.bin/needle | 1 + .../node-pre-gyp/node_modules/.bin/nopt | 1 + .../node-pre-gyp/node_modules/.bin/rc | 1 + .../node-pre-gyp/node_modules/.bin/rimraf | 1 + .../node-pre-gyp/node_modules/.bin/semver | 1 + .../node-pre-gyp/node_modules/nopt/.npmignore | 1 + .../node_modules/nopt/.travis.yml | 8 + .../node_modules/nopt/CHANGELOG.md | 58 + .../node-pre-gyp/node_modules/nopt/LICENSE | 15 + .../node-pre-gyp/node_modules/nopt/README.md | 213 + .../node_modules/nopt/bin/nopt.js | 54 + .../node_modules/nopt/examples/my-program.js | 30 + .../node_modules/nopt/lib/nopt.js | 436 + .../node_modules/nopt/package.json | 23 + .../node_modules/nopt/test/basic.js | 303 + node_modules/node-pre-gyp/package.json | 52 + node_modules/nodemon/.jscsrc | 13 + node_modules/nodemon/.jshintrc | 16 + node_modules/nodemon/.travis.yml | 19 + node_modules/nodemon/README.md | 379 + node_modules/nodemon/bin/nodemon.js | 16 + node_modules/nodemon/bin/postinstall.js | 29 + node_modules/nodemon/commitlint.config.js | 7 + node_modules/nodemon/doc/cli/authors.txt | 8 + node_modules/nodemon/doc/cli/config.txt | 44 + node_modules/nodemon/doc/cli/help.txt | 29 + node_modules/nodemon/doc/cli/logo.txt | 20 + node_modules/nodemon/doc/cli/options.txt | 36 + node_modules/nodemon/doc/cli/topics.txt | 8 + node_modules/nodemon/doc/cli/usage.txt | 3 + node_modules/nodemon/doc/cli/whoami.txt | 9 + node_modules/nodemon/lib/cli/index.js | 49 + node_modules/nodemon/lib/cli/parse.js | 230 + node_modules/nodemon/lib/config/command.js | 43 + node_modules/nodemon/lib/config/defaults.js | 28 + node_modules/nodemon/lib/config/exec.js | 225 + node_modules/nodemon/lib/config/index.js | 93 + node_modules/nodemon/lib/config/load.js | 254 + 
node_modules/nodemon/lib/help/index.js | 27 + node_modules/nodemon/lib/index.js | 1 + node_modules/nodemon/lib/monitor/index.js | 4 + node_modules/nodemon/lib/monitor/match.js | 269 + node_modules/nodemon/lib/monitor/run.js | 438 + node_modules/nodemon/lib/monitor/signals.js | 34 + node_modules/nodemon/lib/monitor/watch.js | 234 + node_modules/nodemon/lib/nodemon.js | 301 + node_modules/nodemon/lib/rules/add.js | 89 + node_modules/nodemon/lib/rules/index.js | 53 + node_modules/nodemon/lib/rules/parse.js | 43 + node_modules/nodemon/lib/spawn.js | 55 + node_modules/nodemon/lib/utils/bus.js | 44 + node_modules/nodemon/lib/utils/clone.js | 40 + node_modules/nodemon/lib/utils/colour.js | 26 + node_modules/nodemon/lib/utils/index.js | 102 + node_modules/nodemon/lib/utils/log.js | 82 + node_modules/nodemon/lib/utils/merge.js | 47 + node_modules/nodemon/lib/version.js | 100 + .../nodemon/node_modules/.bin/nodetouch | 1 + node_modules/nodemon/node_modules/.bin/semver | 1 + .../nodemon/node_modules/debug/CHANGELOG.md | 395 + .../nodemon/node_modules/debug/LICENSE | 19 + .../nodemon/node_modules/debug/README.md | 437 + .../nodemon/node_modules/debug/dist/debug.js | 886 ++ .../nodemon/node_modules/debug/node.js | 1 + .../nodemon/node_modules/debug/package.json | 51 + .../nodemon/node_modules/debug/src/browser.js | 180 + .../nodemon/node_modules/debug/src/common.js | 249 + .../nodemon/node_modules/debug/src/index.js | 12 + .../nodemon/node_modules/debug/src/node.js | 174 + node_modules/nodemon/node_modules/ms/index.js | 162 + .../nodemon/node_modules/ms/license.md | 21 + .../nodemon/node_modules/ms/package.json | 37 + .../nodemon/node_modules/ms/readme.md | 60 + node_modules/nodemon/package.json | 1 + node_modules/nopt/.gitignore | 0 node_modules/nopt/LICENSE | 23 + node_modules/nopt/README.md | 208 + node_modules/nopt/bin/nopt.js | 44 + node_modules/nopt/examples/my-program.js | 30 + node_modules/nopt/lib/nopt.js | 552 ++ node_modules/nopt/package.json | 12 + node_modules/normalize-path/LICENSE | 21 + node_modules/normalize-path/README.md | 127 + node_modules/normalize-path/index.js | 35 + node_modules/normalize-path/package.json | 77 + node_modules/npm-bundled/LICENSE | 15 + node_modules/npm-bundled/README.md | 48 + node_modules/npm-bundled/index.js | 241 + node_modules/npm-bundled/package.json | 27 + node_modules/npm-packlist/LICENSE | 15 + node_modules/npm-packlist/README.md | 68 + node_modules/npm-packlist/index.js | 274 + node_modules/npm-packlist/package.json | 38 + node_modules/npm-run-path/index.js | 39 + node_modules/npm-run-path/license | 21 + node_modules/npm-run-path/package.json | 45 + node_modules/npm-run-path/readme.md | 81 + node_modules/npmlog/CHANGELOG.md | 49 + node_modules/npmlog/LICENSE | 15 + node_modules/npmlog/README.md | 216 + node_modules/npmlog/log.js | 309 + node_modules/npmlog/package.json | 28 + node_modules/number-is-nan/index.js | 4 + node_modules/number-is-nan/license | 21 + node_modules/number-is-nan/package.json | 35 + node_modules/number-is-nan/readme.md | 28 + node_modules/object-assign/index.js | 90 + node_modules/object-assign/license | 21 + node_modules/object-assign/package.json | 42 + node_modules/object-assign/readme.md | 61 + node_modules/object-copy/LICENSE | 21 + node_modules/object-copy/index.js | 174 + node_modules/object-copy/package.json | 47 + node_modules/object-visit/LICENSE | 21 + node_modules/object-visit/README.md | 83 + node_modules/object-visit/index.js | 33 + node_modules/object-visit/package.json | 65 + node_modules/object.pick/LICENSE | 21 + 
node_modules/object.pick/README.md | 76 + node_modules/object.pick/index.js | 35 + node_modules/object.pick/package.json | 60 + node_modules/on-finished/HISTORY.md | 88 + node_modules/on-finished/LICENSE | 23 + node_modules/on-finished/README.md | 154 + node_modules/on-finished/index.js | 196 + node_modules/on-finished/package.json | 31 + node_modules/once/LICENSE | 15 + node_modules/once/README.md | 79 + node_modules/once/once.js | 42 + node_modules/once/package.json | 33 + node_modules/os-homedir/index.js | 24 + node_modules/os-homedir/license | 21 + node_modules/os-homedir/package.json | 41 + node_modules/os-homedir/readme.md | 31 + node_modules/os-tmpdir/index.js | 25 + node_modules/os-tmpdir/license | 21 + node_modules/os-tmpdir/package.json | 41 + node_modules/os-tmpdir/readme.md | 32 + node_modules/osenv/LICENSE | 15 + node_modules/osenv/README.md | 63 + node_modules/osenv/osenv.js | 72 + node_modules/osenv/package.json | 37 + node_modules/p-finally/index.js | 15 + node_modules/p-finally/license | 21 + node_modules/p-finally/package.json | 42 + node_modules/p-finally/readme.md | 47 + node_modules/package-json/index.js | 67 + node_modules/package-json/license | 21 + .../package-json/node_modules/.bin/semver | 1 + node_modules/package-json/package.json | 43 + node_modules/package-json/readme.md | 91 + node_modules/parseurl/HISTORY.md | 58 + node_modules/parseurl/LICENSE | 24 + node_modules/parseurl/README.md | 133 + node_modules/parseurl/index.js | 158 + node_modules/parseurl/package.json | 40 + node_modules/pascalcase/LICENSE | 21 + node_modules/pascalcase/README.md | 80 + node_modules/pascalcase/index.js | 21 + node_modules/pascalcase/package.json | 46 + node_modules/path-dirname/index.js | 143 + node_modules/path-dirname/license | 22 + node_modules/path-dirname/package.json | 29 + node_modules/path-dirname/readme.md | 53 + node_modules/path-is-absolute/index.js | 20 + node_modules/path-is-absolute/license | 21 + node_modules/path-is-absolute/package.json | 43 + node_modules/path-is-absolute/readme.md | 59 + node_modules/path-is-inside/LICENSE.txt | 47 + .../path-is-inside/lib/path-is-inside.js | 28 + node_modules/path-is-inside/package.json | 21 + node_modules/path-key/index.js | 13 + node_modules/path-key/license | 21 + node_modules/path-key/package.json | 39 + node_modules/path-key/readme.md | 51 + node_modules/path-to-regexp/History.md | 36 + node_modules/path-to-regexp/LICENSE | 21 + node_modules/path-to-regexp/Readme.md | 35 + node_modules/path-to-regexp/index.js | 129 + node_modules/path-to-regexp/package.json | 30 + node_modules/pify/index.js | 84 + node_modules/pify/license | 9 + node_modules/pify/package.json | 51 + node_modules/pify/readme.md | 131 + node_modules/pirates/LICENSE | 21 + node_modules/pirates/README.md | 89 + node_modules/pirates/index.d.ts | 30 + node_modules/pirates/lib/index.js | 118 + node_modules/pirates/package.json | 84 + node_modules/posix-character-classes/LICENSE | 21 + .../posix-character-classes/README.md | 103 + node_modules/posix-character-classes/index.js | 22 + .../posix-character-classes/package.json | 54 + node_modules/prepend-http/index.js | 14 + node_modules/prepend-http/license | 21 + node_modules/prepend-http/package.json | 35 + node_modules/prepend-http/readme.md | 31 + node_modules/process-nextick-args/index.js | 45 + node_modules/process-nextick-args/license.md | 19 + .../process-nextick-args/package.json | 25 + node_modules/process-nextick-args/readme.md | 18 + node_modules/proxy-addr/HISTORY.md | 150 + 
node_modules/proxy-addr/LICENSE | 22 + node_modules/proxy-addr/README.md | 155 + node_modules/proxy-addr/index.js | 327 + node_modules/proxy-addr/package.json | 47 + node_modules/pseudomap/LICENSE | 15 + node_modules/pseudomap/README.md | 60 + node_modules/pseudomap/map.js | 9 + node_modules/pseudomap/package.json | 25 + node_modules/pseudomap/pseudomap.js | 113 + node_modules/pseudomap/test/basic.js | 86 + node_modules/pstree.remy/.travis.yml | 8 + node_modules/pstree.remy/LICENSE | 7 + node_modules/pstree.remy/lib/index.js | 32 + node_modules/pstree.remy/lib/tree.js | 34 + node_modules/pstree.remy/lib/utils.js | 56 + node_modules/pstree.remy/package.json | 24 + .../pstree.remy/tests/fixtures/index.js | 8 + node_modules/pstree.remy/tests/fixtures/out1 | 10 + node_modules/pstree.remy/tests/fixtures/out2 | 29 + node_modules/pstree.remy/tests/index.test.js | 40 + node_modules/qs/.editorconfig | 30 + node_modules/qs/.eslintignore | 1 + node_modules/qs/.eslintrc | 21 + node_modules/qs/CHANGELOG.md | 256 + node_modules/qs/LICENSE | 28 + node_modules/qs/README.md | 570 ++ node_modules/qs/dist/qs.js | 782 ++ node_modules/qs/lib/formats.js | 18 + node_modules/qs/lib/index.js | 11 + node_modules/qs/lib/parse.js | 242 + node_modules/qs/lib/stringify.js | 269 + node_modules/qs/lib/utils.js | 230 + node_modules/qs/package.json | 58 + node_modules/qs/test/.eslintrc | 17 + node_modules/qs/test/index.js | 7 + node_modules/qs/test/parse.js | 676 ++ node_modules/qs/test/stringify.js | 679 ++ node_modules/qs/test/utils.js | 136 + node_modules/range-parser/HISTORY.md | 56 + node_modules/range-parser/LICENSE | 23 + node_modules/range-parser/README.md | 84 + node_modules/range-parser/index.js | 162 + node_modules/range-parser/package.json | 44 + node_modules/raw-body/HISTORY.md | 270 + node_modules/raw-body/LICENSE | 22 + node_modules/raw-body/README.md | 219 + node_modules/raw-body/index.d.ts | 87 + node_modules/raw-body/index.js | 286 + node_modules/raw-body/package.json | 48 + node_modules/rc/LICENSE.APACHE2 | 15 + node_modules/rc/LICENSE.BSD | 26 + node_modules/rc/LICENSE.MIT | 24 + node_modules/rc/README.md | 227 + node_modules/rc/browser.js | 7 + node_modules/rc/cli.js | 4 + node_modules/rc/index.js | 53 + node_modules/rc/lib/utils.js | 104 + .../rc/node_modules/minimist/.travis.yml | 8 + node_modules/rc/node_modules/minimist/LICENSE | 18 + .../rc/node_modules/minimist/example/parse.js | 2 + .../rc/node_modules/minimist/index.js | 236 + .../rc/node_modules/minimist/package.json | 45 + .../rc/node_modules/minimist/readme.markdown | 91 + .../rc/node_modules/minimist/test/all_bool.js | 32 + .../rc/node_modules/minimist/test/bool.js | 166 + .../rc/node_modules/minimist/test/dash.js | 31 + .../minimist/test/default_bool.js | 35 + .../rc/node_modules/minimist/test/dotted.js | 22 + .../rc/node_modules/minimist/test/kv_short.js | 16 + .../rc/node_modules/minimist/test/long.js | 31 + .../rc/node_modules/minimist/test/num.js | 36 + .../rc/node_modules/minimist/test/parse.js | 197 + .../minimist/test/parse_modified.js | 9 + .../rc/node_modules/minimist/test/short.js | 67 + .../node_modules/minimist/test/stop_early.js | 15 + .../rc/node_modules/minimist/test/unknown.js | 102 + .../node_modules/minimist/test/whitespace.js | 8 + node_modules/rc/package.json | 29 + node_modules/rc/test/ini.js | 16 + node_modules/rc/test/nested-env-vars.js | 50 + node_modules/rc/test/test.js | 59 + node_modules/readable-stream/.travis.yml | 55 + node_modules/readable-stream/CONTRIBUTING.md | 38 + 
node_modules/readable-stream/GOVERNANCE.md | 136 + node_modules/readable-stream/LICENSE | 47 + node_modules/readable-stream/README.md | 58 + .../doc/wg-meetings/2015-01-30.md | 60 + .../readable-stream/duplex-browser.js | 1 + node_modules/readable-stream/duplex.js | 1 + .../readable-stream/lib/_stream_duplex.js | 131 + .../lib/_stream_passthrough.js | 47 + .../readable-stream/lib/_stream_readable.js | 1019 +++ .../readable-stream/lib/_stream_transform.js | 214 + .../readable-stream/lib/_stream_writable.js | 687 ++ .../lib/internal/streams/BufferList.js | 79 + .../lib/internal/streams/destroy.js | 74 + .../lib/internal/streams/stream-browser.js | 1 + .../lib/internal/streams/stream.js | 1 + node_modules/readable-stream/package.json | 52 + node_modules/readable-stream/passthrough.js | 1 + .../readable-stream/readable-browser.js | 7 + node_modules/readable-stream/readable.js | 19 + node_modules/readable-stream/transform.js | 1 + .../readable-stream/writable-browser.js | 1 + node_modules/readable-stream/writable.js | 8 + node_modules/readdirp/LICENSE | 20 + node_modules/readdirp/README.md | 204 + node_modules/readdirp/package.json | 50 + node_modules/readdirp/readdirp.js | 294 + node_modules/readdirp/stream-api.js | 98 + node_modules/regex-not/LICENSE | 21 + node_modules/regex-not/README.md | 133 + node_modules/regex-not/index.js | 72 + .../node_modules/extend-shallow/LICENSE | 21 + .../node_modules/extend-shallow/README.md | 97 + .../node_modules/extend-shallow/index.js | 60 + .../node_modules/extend-shallow/package.json | 83 + .../node_modules/is-extendable/LICENSE | 21 + .../node_modules/is-extendable/README.md | 88 + .../node_modules/is-extendable/index.d.ts | 5 + .../node_modules/is-extendable/index.js | 14 + .../node_modules/is-extendable/package.json | 67 + node_modules/regex-not/package.json | 63 + node_modules/registry-auth-token/.npmignore | 6 + node_modules/registry-auth-token/CHANGELOG.md | 112 + node_modules/registry-auth-token/LICENSE | 21 + node_modules/registry-auth-token/README.md | 65 + node_modules/registry-auth-token/base64.js | 14 + node_modules/registry-auth-token/index.js | 123 + .../registry-auth-token/node_modules/.bin/rc | 1 + .../node_modules/safe-buffer/LICENSE | 21 + .../node_modules/safe-buffer/README.md | 586 ++ .../node_modules/safe-buffer/index.d.ts | 187 + .../node_modules/safe-buffer/index.js | 64 + .../node_modules/safe-buffer/package.json | 37 + node_modules/registry-auth-token/package.json | 46 + .../registry-auth-token/registry-url.js | 5 + .../test/auth-token.test.js | 455 + .../test/registry-url.test.js | 64 + node_modules/registry-auth-token/yarn.lock | 1516 ++++ node_modules/registry-url/index.js | 6 + node_modules/registry-url/license | 21 + .../registry-url/node_modules/.bin/rc | 1 + node_modules/registry-url/package.json | 40 + node_modules/registry-url/readme.md | 50 + .../remove-trailing-separator/history.md | 17 + .../remove-trailing-separator/index.js | 17 + .../remove-trailing-separator/license | 3 + .../remove-trailing-separator/package.json | 37 + .../remove-trailing-separator/readme.md | 51 + node_modules/repeat-element/LICENSE | 21 + node_modules/repeat-element/README.md | 99 + node_modules/repeat-element/index.js | 18 + node_modules/repeat-element/package.json | 49 + node_modules/repeat-string/LICENSE | 21 + node_modules/repeat-string/README.md | 136 + node_modules/repeat-string/index.js | 70 + node_modules/repeat-string/package.json | 77 + node_modules/resolve-url/.jshintrc | 44 + node_modules/resolve-url/LICENSE | 21 + 
node_modules/resolve-url/bower.json | 15 + node_modules/resolve-url/changelog.md | 15 + node_modules/resolve-url/component.json | 15 + node_modules/resolve-url/package.json | 34 + node_modules/resolve-url/readme.md | 83 + node_modules/resolve-url/resolve-url.js | 47 + node_modules/resolve-url/test/resolve-url.js | 70 + node_modules/ret/LICENSE | 19 + node_modules/ret/README.md | 183 + node_modules/ret/lib/index.js | 282 + node_modules/ret/lib/positions.js | 17 + node_modules/ret/lib/sets.js | 82 + node_modules/ret/lib/types.js | 10 + node_modules/ret/lib/util.js | 111 + node_modules/ret/package.json | 35 + node_modules/rimraf/LICENSE | 15 + node_modules/rimraf/README.md | 101 + node_modules/rimraf/bin.js | 50 + node_modules/rimraf/package.json | 29 + node_modules/rimraf/rimraf.js | 364 + node_modules/safe-buffer/LICENSE | 21 + node_modules/safe-buffer/README.md | 584 ++ node_modules/safe-buffer/index.d.ts | 187 + node_modules/safe-buffer/index.js | 62 + node_modules/safe-buffer/package.json | 37 + node_modules/safe-regex/.travis.yml | 4 + node_modules/safe-regex/LICENSE | 18 + node_modules/safe-regex/example/safe.js | 3 + node_modules/safe-regex/index.js | 43 + node_modules/safe-regex/package.json | 43 + node_modules/safe-regex/readme.markdown | 65 + node_modules/safe-regex/test/regex.js | 50 + node_modules/safer-buffer/LICENSE | 21 + node_modules/safer-buffer/Porting-Buffer.md | 268 + node_modules/safer-buffer/Readme.md | 156 + node_modules/safer-buffer/dangerous.js | 58 + node_modules/safer-buffer/package.json | 34 + node_modules/safer-buffer/safer.js | 77 + node_modules/safer-buffer/tests.js | 406 + node_modules/sax/LICENSE | 41 + node_modules/sax/README.md | 225 + node_modules/sax/lib/sax.js | 1565 ++++ node_modules/sax/package.json | 25 + node_modules/semver-diff/index.js | 27 + node_modules/semver-diff/license | 21 + .../semver-diff/node_modules/.bin/semver | 1 + node_modules/semver-diff/package.json | 34 + node_modules/semver-diff/readme.md | 52 + node_modules/semver/CHANGELOG.md | 39 + node_modules/semver/LICENSE | 15 + node_modules/semver/README.md | 411 + node_modules/semver/bin/semver | 160 + node_modules/semver/package.json | 28 + node_modules/semver/range.bnf | 16 + node_modules/semver/semver.js | 1483 ++++ node_modules/send/HISTORY.md | 496 ++ node_modules/send/LICENSE | 23 + node_modules/send/README.md | 329 + node_modules/send/index.js | 1129 +++ node_modules/send/node_modules/.bin/mime | 1 + .../send/node_modules/http-errors/HISTORY.md | 154 + .../send/node_modules/http-errors/LICENSE | 23 + .../send/node_modules/http-errors/README.md | 163 + .../send/node_modules/http-errors/index.js | 266 + .../node_modules/http-errors/package.json | 49 + node_modules/send/node_modules/ms/index.js | 162 + node_modules/send/node_modules/ms/license.md | 21 + .../send/node_modules/ms/package.json | 37 + node_modules/send/node_modules/ms/readme.md | 60 + node_modules/send/package.json | 61 + node_modules/serve-static/HISTORY.md | 451 + node_modules/serve-static/LICENSE | 25 + node_modules/serve-static/README.md | 259 + node_modules/serve-static/index.js | 210 + node_modules/serve-static/package.json | 42 + node_modules/set-blocking/CHANGELOG.md | 26 + node_modules/set-blocking/LICENSE.txt | 14 + node_modules/set-blocking/README.md | 31 + node_modules/set-blocking/index.js | 7 + node_modules/set-blocking/package.json | 42 + node_modules/set-value/LICENSE | 21 + node_modules/set-value/README.md | 150 + node_modules/set-value/index.js | 55 + node_modules/set-value/package.json | 79 + 
node_modules/setprototypeof/LICENSE | 13 + node_modules/setprototypeof/README.md | 31 + node_modules/setprototypeof/index.d.ts | 2 + node_modules/setprototypeof/index.js | 17 + node_modules/setprototypeof/package.json | 36 + node_modules/setprototypeof/test/index.js | 24 + node_modules/shebang-command/index.js | 19 + node_modules/shebang-command/license | 21 + node_modules/shebang-command/package.json | 39 + node_modules/shebang-command/readme.md | 39 + node_modules/shebang-regex/index.js | 2 + node_modules/shebang-regex/license | 21 + node_modules/shebang-regex/package.json | 32 + node_modules/shebang-regex/readme.md | 29 + node_modules/signal-exit/CHANGELOG.md | 27 + node_modules/signal-exit/LICENSE.txt | 16 + node_modules/signal-exit/README.md | 40 + node_modules/signal-exit/index.js | 157 + node_modules/signal-exit/package.json | 38 + node_modules/signal-exit/signals.js | 53 + node_modules/snapdragon-node/LICENSE | 21 + node_modules/snapdragon-node/README.md | 453 + node_modules/snapdragon-node/index.js | 492 ++ .../node_modules/define-property/LICENSE | 21 + .../node_modules/define-property/README.md | 95 + .../node_modules/define-property/index.js | 31 + .../node_modules/define-property/package.json | 62 + node_modules/snapdragon-node/package.json | 76 + node_modules/snapdragon-util/LICENSE | 21 + node_modules/snapdragon-util/README.md | 807 ++ node_modules/snapdragon-util/index.js | 1019 +++ node_modules/snapdragon-util/package.json | 65 + node_modules/snapdragon/LICENSE | 21 + node_modules/snapdragon/README.md | 321 + node_modules/snapdragon/index.js | 174 + node_modules/snapdragon/lib/compiler.js | 177 + node_modules/snapdragon/lib/parser.js | 533 ++ node_modules/snapdragon/lib/position.js | 14 + node_modules/snapdragon/lib/source-maps.js | 145 + node_modules/snapdragon/lib/utils.js | 48 + node_modules/snapdragon/package.json | 79 + node_modules/source-map-resolve/.jshintrc | 46 + node_modules/source-map-resolve/.travis.yml | 3 + node_modules/source-map-resolve/LICENSE | 21 + node_modules/source-map-resolve/bower.json | 30 + node_modules/source-map-resolve/changelog.md | 100 + .../source-map-resolve/component.json | 29 + .../generate-source-map-resolve.js | 28 + .../lib/decode-uri-component.js | 11 + .../source-map-resolve/lib/resolve-url.js | 12 + .../lib/source-map-resolve-node.js | 302 + .../source-map-resolve/node_modules/.bin/atob | 1 + node_modules/source-map-resolve/package.json | 43 + node_modules/source-map-resolve/readme.md | 231 + .../source-map-resolve/source-map-resolve.js | 309 + .../source-map-resolve.js.template | 22 + .../source-map-resolve/test/common.js | 27 + node_modules/source-map-resolve/test/read.js | 105 + .../test/source-map-resolve.js | 1162 +++ .../source-map-resolve/test/windows.js | 166 + .../source-map-resolve/x-package.json5 | 68 + node_modules/source-map-url/.jshintrc | 43 + node_modules/source-map-url/LICENSE | 21 + node_modules/source-map-url/bower.json | 20 + node_modules/source-map-url/changelog.md | 52 + node_modules/source-map-url/component.json | 18 + node_modules/source-map-url/package.json | 39 + node_modules/source-map-url/readme.md | 97 + node_modules/source-map-url/source-map-url.js | 57 + .../source-map-url/test/source-map-url.js | 402 + node_modules/source-map-url/x-package.json5 | 55 + node_modules/source-map/CHANGELOG.md | 301 + node_modules/source-map/LICENSE | 28 + node_modules/source-map/README.md | 729 ++ .../source-map/dist/source-map.debug.js | 3091 +++++++ node_modules/source-map/dist/source-map.js | 3090 +++++++ 
.../source-map/dist/source-map.min.js | 2 + .../source-map/dist/source-map.min.js.map | 1 + node_modules/source-map/lib/array-set.js | 121 + node_modules/source-map/lib/base64-vlq.js | 140 + node_modules/source-map/lib/base64.js | 67 + node_modules/source-map/lib/binary-search.js | 111 + node_modules/source-map/lib/mapping-list.js | 79 + node_modules/source-map/lib/quick-sort.js | 114 + .../source-map/lib/source-map-consumer.js | 1082 +++ .../source-map/lib/source-map-generator.js | 416 + node_modules/source-map/lib/source-node.js | 413 + node_modules/source-map/lib/util.js | 417 + node_modules/source-map/package.json | 72 + node_modules/source-map/source-map.js | 8 + node_modules/split-string/LICENSE | 21 + node_modules/split-string/README.md | 321 + node_modules/split-string/index.js | 171 + .../node_modules/extend-shallow/LICENSE | 21 + .../node_modules/extend-shallow/README.md | 97 + .../node_modules/extend-shallow/index.js | 60 + .../node_modules/extend-shallow/package.json | 83 + .../node_modules/is-extendable/LICENSE | 21 + .../node_modules/is-extendable/README.md | 88 + .../node_modules/is-extendable/index.d.ts | 5 + .../node_modules/is-extendable/index.js | 14 + .../node_modules/is-extendable/package.json | 67 + node_modules/split-string/package.json | 65 + node_modules/static-extend/LICENSE | 21 + node_modules/static-extend/index.js | 90 + node_modules/static-extend/package.json | 63 + node_modules/statuses/HISTORY.md | 65 + node_modules/statuses/LICENSE | 23 + node_modules/statuses/README.md | 127 + node_modules/statuses/codes.json | 66 + node_modules/statuses/index.js | 113 + node_modules/statuses/package.json | 48 + node_modules/string-width/index.js | 36 + node_modules/string-width/license | 9 + .../node_modules/ansi-regex/index.js | 10 + .../node_modules/ansi-regex/license | 9 + .../node_modules/ansi-regex/package.json | 53 + .../node_modules/ansi-regex/readme.md | 46 + .../node_modules/strip-ansi/index.js | 4 + .../node_modules/strip-ansi/license | 9 + .../node_modules/strip-ansi/package.json | 52 + .../node_modules/strip-ansi/readme.md | 39 + node_modules/string-width/package.json | 55 + node_modules/string-width/readme.md | 42 + node_modules/string_decoder/.travis.yml | 50 + node_modules/string_decoder/LICENSE | 48 + node_modules/string_decoder/README.md | 47 + .../string_decoder/lib/string_decoder.js | 296 + node_modules/string_decoder/package.json | 31 + node_modules/strip-ansi/index.js | 6 + node_modules/strip-ansi/license | 21 + node_modules/strip-ansi/package.json | 57 + node_modules/strip-ansi/readme.md | 33 + node_modules/strip-eof/index.js | 15 + node_modules/strip-eof/license | 21 + node_modules/strip-eof/package.json | 39 + node_modules/strip-eof/readme.md | 28 + node_modules/strip-json-comments/index.js | 70 + node_modules/strip-json-comments/license | 21 + node_modules/strip-json-comments/package.json | 42 + node_modules/strip-json-comments/readme.md | 64 + node_modules/sucrase/CHANGELOG.md | 230 + node_modules/sucrase/LICENSE | 21 + node_modules/sucrase/README.md | 230 + node_modules/sucrase/bin/sucrase | 3 + node_modules/sucrase/bin/sucrase-node | 18 + .../sucrase/dist/CJSImportProcessor.d.ts | 66 + .../sucrase/dist/CJSImportProcessor.js | 461 + .../sucrase/dist/CJSImportProcessor.mjs | 461 + node_modules/sucrase/dist/HelperManager.d.ts | 17 + node_modules/sucrase/dist/HelperManager.js | 78 + node_modules/sucrase/dist/HelperManager.mjs | 78 + node_modules/sucrase/dist/NameManager.d.ts | 9 + node_modules/sucrase/dist/NameManager.js | 33 + 
node_modules/sucrase/dist/NameManager.mjs | 33 + node_modules/sucrase/dist/TokenProcessor.d.ts | 61 + node_modules/sucrase/dist/TokenProcessor.js | 244 + node_modules/sucrase/dist/TokenProcessor.mjs | 244 + node_modules/sucrase/dist/cli.d.ts | 1 + node_modules/sucrase/dist/cli.js | 106 + node_modules/sucrase/dist/cli.mjs | 106 + .../sucrase/dist/computeSourceMap.d.ts | 14 + node_modules/sucrase/dist/computeSourceMap.js | 35 + .../sucrase/dist/computeSourceMap.mjs | 35 + .../sucrase/dist/identifyShadowedGlobals.d.ts | 12 + .../sucrase/dist/identifyShadowedGlobals.js | 94 + .../sucrase/dist/identifyShadowedGlobals.mjs | 94 + node_modules/sucrase/dist/index.d.ts | 63 + node_modules/sucrase/dist/index.js | 152 + node_modules/sucrase/dist/index.mjs | 152 + node_modules/sucrase/dist/parser/index.d.ts | 8 + node_modules/sucrase/dist/parser/index.js | 31 + node_modules/sucrase/dist/parser/index.mjs | 31 + .../sucrase/dist/parser/plugins/flow.d.ts | 26 + .../sucrase/dist/parser/plugins/flow.js | 1047 +++ .../sucrase/dist/parser/plugins/flow.mjs | 1047 +++ .../dist/parser/plugins/jsx/index.d.ts | 2 + .../sucrase/dist/parser/plugins/jsx/index.js | 309 + .../sucrase/dist/parser/plugins/jsx/index.mjs | 309 + .../dist/parser/plugins/jsx/xhtml.d.ts | 4 + .../sucrase/dist/parser/plugins/jsx/xhtml.js | 256 + .../sucrase/dist/parser/plugins/jsx/xhtml.mjs | 256 + .../sucrase/dist/parser/plugins/types.d.ts | 5 + .../sucrase/dist/parser/plugins/types.js | 39 + .../sucrase/dist/parser/plugins/types.mjs | 39 + .../dist/parser/plugins/typescript.d.ts | 34 + .../sucrase/dist/parser/plugins/typescript.js | 1365 +++ .../dist/parser/plugins/typescript.mjs | 1365 +++ .../sucrase/dist/parser/tokenizer/index.d.ts | 55 + .../sucrase/dist/parser/tokenizer/index.js | 858 ++ .../sucrase/dist/parser/tokenizer/index.mjs | 858 ++ .../dist/parser/tokenizer/keywords.d.ts | 35 + .../sucrase/dist/parser/tokenizer/keywords.js | 35 + .../dist/parser/tokenizer/keywords.mjs | 35 + .../dist/parser/tokenizer/readWord.d.ts | 7 + .../sucrase/dist/parser/tokenizer/readWord.js | 64 + .../dist/parser/tokenizer/readWord.mjs | 64 + .../dist/parser/tokenizer/readWordTree.d.ts | 1 + .../dist/parser/tokenizer/readWordTree.js | 595 ++ .../dist/parser/tokenizer/readWordTree.mjs | 595 ++ .../sucrase/dist/parser/tokenizer/state.d.ts | 48 + .../sucrase/dist/parser/tokenizer/state.js | 100 + .../sucrase/dist/parser/tokenizer/state.mjs | 100 + .../sucrase/dist/parser/tokenizer/types.d.ts | 121 + .../sucrase/dist/parser/tokenizer/types.js | 347 + .../sucrase/dist/parser/tokenizer/types.mjs | 347 + .../sucrase/dist/parser/traverser/base.d.ts | 16 + .../sucrase/dist/parser/traverser/base.js | 60 + .../sucrase/dist/parser/traverser/base.mjs | 60 + .../dist/parser/traverser/expression.d.ts | 34 + .../dist/parser/traverser/expression.js | 950 ++ .../dist/parser/traverser/expression.mjs | 950 ++ .../sucrase/dist/parser/traverser/index.d.ts | 2 + .../sucrase/dist/parser/traverser/index.js | 18 + .../sucrase/dist/parser/traverser/index.mjs | 18 + .../sucrase/dist/parser/traverser/lval.d.ts | 9 + .../sucrase/dist/parser/traverser/lval.js | 154 + .../sucrase/dist/parser/traverser/lval.mjs | 154 + .../dist/parser/traverser/statement.d.ts | 20 + .../dist/parser/traverser/statement.js | 1115 +++ .../dist/parser/traverser/statement.mjs | 1115 +++ .../sucrase/dist/parser/traverser/util.d.ts | 16 + .../sucrase/dist/parser/traverser/util.js | 88 + .../sucrase/dist/parser/traverser/util.mjs | 88 + .../sucrase/dist/parser/util/charcodes.d.ts | 106 + 
.../sucrase/dist/parser/util/charcodes.js | 114 + .../sucrase/dist/parser/util/charcodes.mjs | 114 + .../sucrase/dist/parser/util/identifier.d.ts | 2 + .../sucrase/dist/parser/util/identifier.js | 34 + .../sucrase/dist/parser/util/identifier.mjs | 34 + .../sucrase/dist/parser/util/whitespace.d.ts | 2 + .../sucrase/dist/parser/util/whitespace.js | 31 + .../sucrase/dist/parser/util/whitespace.mjs | 31 + node_modules/sucrase/dist/register.d.ts | 9 + node_modules/sucrase/dist/register.js | 57 + node_modules/sucrase/dist/register.mjs | 57 + .../transformers/CJSImportTransformer.d.ts | 131 + .../dist/transformers/CJSImportTransformer.js | 777 ++ .../transformers/CJSImportTransformer.mjs | 777 ++ .../transformers/ESMImportTransformer.d.ts | 41 + .../dist/transformers/ESMImportTransformer.js | 310 + .../transformers/ESMImportTransformer.mjs | 310 + .../dist/transformers/FlowTransformer.d.ts | 9 + .../dist/transformers/FlowTransformer.js | 17 + .../dist/transformers/FlowTransformer.mjs | 17 + .../dist/transformers/JSXTransformer.d.ts | 44 + .../dist/transformers/JSXTransformer.js | 398 + .../dist/transformers/JSXTransformer.mjs | 398 + .../NumericSeparatorTransformer.d.ts | 7 + .../NumericSeparatorTransformer.js | 20 + .../NumericSeparatorTransformer.mjs | 20 + .../OptionalCatchBindingTransformer.d.ts | 9 + .../OptionalCatchBindingTransformer.js | 19 + .../OptionalCatchBindingTransformer.mjs | 19 + .../ReactDisplayNameTransformer.d.ts | 29 + .../ReactDisplayNameTransformer.js | 160 + .../ReactDisplayNameTransformer.mjs | 160 + .../ReactHotLoaderTransformer.d.ts | 12 + .../transformers/ReactHotLoaderTransformer.js | 67 + .../ReactHotLoaderTransformer.mjs | 67 + .../dist/transformers/RootTransformer.d.ts | 45 + .../dist/transformers/RootTransformer.js | 397 + .../dist/transformers/RootTransformer.mjs | 397 + .../dist/transformers/Transformer.d.ts | 5 + .../sucrase/dist/transformers/Transformer.js | 12 + .../sucrase/dist/transformers/Transformer.mjs | 12 + .../transformers/TypeScriptTransformer.d.ts | 17 + .../transformers/TypeScriptTransformer.js | 156 + .../transformers/TypeScriptTransformer.mjs | 156 + .../sucrase/dist/util/elideImportEquals.d.ts | 2 + .../sucrase/dist/util/elideImportEquals.js | 29 + .../sucrase/dist/util/elideImportEquals.mjs | 29 + .../sucrase/dist/util/formatTokens.d.ts | 2 + .../sucrase/dist/util/formatTokens.js | 71 + .../sucrase/dist/util/formatTokens.mjs | 71 + .../sucrase/dist/util/getClassInfo.d.ts | 34 + .../sucrase/dist/util/getClassInfo.js | 281 + .../sucrase/dist/util/getClassInfo.mjs | 281 + .../sucrase/dist/util/getDeclarationInfo.d.ts | 18 + .../sucrase/dist/util/getDeclarationInfo.js | 40 + .../sucrase/dist/util/getDeclarationInfo.mjs | 40 + .../sucrase/dist/util/getJSXPragmaInfo.d.ts | 8 + .../sucrase/dist/util/getJSXPragmaInfo.js | 22 + .../sucrase/dist/util/getJSXPragmaInfo.mjs | 22 + .../dist/util/getNonTypeIdentifiers.d.ts | 3 + .../dist/util/getNonTypeIdentifiers.js | 43 + .../dist/util/getNonTypeIdentifiers.mjs | 43 + .../sucrase/dist/util/getTSImportedNames.d.ts | 9 + .../sucrase/dist/util/getTSImportedNames.js | 88 + .../sucrase/dist/util/getTSImportedNames.mjs | 88 + .../sucrase/dist/util/isIdentifier.d.ts | 1 + .../sucrase/dist/util/isIdentifier.js | 70 + .../sucrase/dist/util/isIdentifier.mjs | 70 + .../dist/util/shouldElideDefaultExport.d.ts | 6 + .../dist/util/shouldElideDefaultExport.js | 37 + .../dist/util/shouldElideDefaultExport.mjs | 37 + node_modules/sucrase/package.json | 89 + node_modules/sucrase/register/index.js | 1 + 
node_modules/sucrase/register/js.js | 1 + node_modules/sucrase/register/jsx.js | 1 + .../register/ts-legacy-module-interop.js | 1 + node_modules/sucrase/register/ts.js | 1 + .../register/tsx-legacy-module-interop.js | 1 + node_modules/sucrase/register/tsx.js | 1 + node_modules/supports-color/browser.js | 5 + node_modules/supports-color/index.js | 131 + node_modules/supports-color/license | 9 + node_modules/supports-color/package.json | 53 + node_modules/supports-color/readme.md | 66 + node_modules/tar/LICENSE | 15 + node_modules/tar/README.md | 954 ++ node_modules/tar/index.js | 18 + node_modules/tar/lib/.mkdir.js.swp | Bin 0 -> 16384 bytes node_modules/tar/lib/buffer.js | 11 + node_modules/tar/lib/create.js | 105 + node_modules/tar/lib/extract.js | 112 + node_modules/tar/lib/header.js | 289 + node_modules/tar/lib/high-level-opt.js | 29 + node_modules/tar/lib/large-numbers.js | 97 + node_modules/tar/lib/list.js | 130 + node_modules/tar/lib/mkdir.js | 206 + node_modules/tar/lib/mode-fix.js | 14 + node_modules/tar/lib/pack.js | 404 + node_modules/tar/lib/parse.js | 423 + node_modules/tar/lib/pax.js | 146 + node_modules/tar/lib/read-entry.js | 94 + node_modules/tar/lib/replace.js | 220 + node_modules/tar/lib/types.js | 44 + node_modules/tar/lib/unpack.js | 621 ++ node_modules/tar/lib/update.js | 36 + node_modules/tar/lib/warn-mixin.js | 14 + node_modules/tar/lib/winchars.js | 23 + node_modules/tar/lib/write-entry.js | 422 + node_modules/tar/node_modules/.bin/mkdirp | 1 + .../tar/node_modules/safe-buffer/LICENSE | 21 + .../tar/node_modules/safe-buffer/README.md | 586 ++ .../tar/node_modules/safe-buffer/index.d.ts | 187 + .../tar/node_modules/safe-buffer/index.js | 64 + .../tar/node_modules/safe-buffer/package.json | 37 + node_modules/tar/package.json | 49 + node_modules/term-size/index.js | 70 + node_modules/term-size/license | 21 + node_modules/term-size/package.json | 43 + node_modules/term-size/readme.md | 41 + node_modules/term-size/vendor/macos/term-size | Bin 0 -> 8760 bytes .../term-size/vendor/windows/term-size.exe | Bin 0 -> 17408 bytes node_modules/thenify-all/History.md | 11 + node_modules/thenify-all/LICENSE | 22 + node_modules/thenify-all/README.md | 66 + node_modules/thenify-all/index.js | 73 + node_modules/thenify-all/package.json | 34 + node_modules/thenify/History.md | 5 + node_modules/thenify/LICENSE | 22 + node_modules/thenify/README.md | 120 + node_modules/thenify/index.js | 80 + node_modules/thenify/package.json | 31 + node_modules/timed-out/index.js | 55 + node_modules/timed-out/license | 21 + node_modules/timed-out/package.json | 36 + node_modules/timed-out/readme.md | 42 + node_modules/to-object-path/LICENSE | 21 + node_modules/to-object-path/README.md | 71 + node_modules/to-object-path/index.js | 33 + node_modules/to-object-path/package.json | 48 + node_modules/to-regex-range/LICENSE | 21 + node_modules/to-regex-range/README.md | 281 + node_modules/to-regex-range/index.js | 294 + node_modules/to-regex-range/package.json | 86 + node_modules/to-regex/LICENSE | 21 + node_modules/to-regex/README.md | 205 + node_modules/to-regex/index.js | 155 + .../node_modules/define-property/CHANGELOG.md | 82 + .../node_modules/define-property/LICENSE | 21 + .../node_modules/define-property/README.md | 117 + .../node_modules/define-property/index.js | 38 + .../node_modules/define-property/package.json | 67 + .../node_modules/extend-shallow/LICENSE | 21 + .../node_modules/extend-shallow/README.md | 97 + .../node_modules/extend-shallow/index.js | 60 + 
.../node_modules/extend-shallow/package.json | 83 + .../node_modules/is-extendable/LICENSE | 21 + .../node_modules/is-extendable/README.md | 88 + .../node_modules/is-extendable/index.d.ts | 5 + .../node_modules/is-extendable/index.js | 14 + .../node_modules/is-extendable/package.json | 67 + node_modules/to-regex/package.json | 62 + node_modules/toidentifier/LICENSE | 21 + node_modules/toidentifier/README.md | 61 + node_modules/toidentifier/index.js | 30 + node_modules/toidentifier/package.json | 34 + node_modules/touch/LICENSE | 15 + node_modules/touch/README.md | 52 + node_modules/touch/bin/nodetouch.js | 112 + node_modules/touch/index.js | 224 + node_modules/touch/node_modules/.bin/nopt | 1 + node_modules/touch/package.json | 28 + node_modules/type-is/HISTORY.md | 259 + node_modules/type-is/LICENSE | 23 + node_modules/type-is/README.md | 170 + node_modules/type-is/index.js | 266 + node_modules/type-is/package.json | 45 + node_modules/undefsafe/.jscsrc | 13 + node_modules/undefsafe/.jshintrc | 16 + node_modules/undefsafe/.npmignore | 2 + node_modules/undefsafe/.travis.yml | 18 + node_modules/undefsafe/LICENSE | 22 + node_modules/undefsafe/README.md | 63 + node_modules/undefsafe/example.js | 14 + node_modules/undefsafe/lib/undefsafe.js | 113 + node_modules/undefsafe/package.json | 32 + node_modules/union-value/LICENSE | 21 + node_modules/union-value/README.md | 73 + node_modules/union-value/index.js | 30 + node_modules/union-value/package.json | 70 + node_modules/unique-string/index.js | 4 + node_modules/unique-string/license | 21 + node_modules/unique-string/package.json | 44 + node_modules/unique-string/readme.md | 32 + node_modules/unpipe/HISTORY.md | 4 + node_modules/unpipe/LICENSE | 22 + node_modules/unpipe/README.md | 43 + node_modules/unpipe/index.js | 69 + node_modules/unpipe/package.json | 27 + node_modules/unset-value/LICENSE | 21 + node_modules/unset-value/README.md | 131 + node_modules/unset-value/index.js | 32 + .../node_modules/has-value/LICENSE | 21 + .../node_modules/has-value/README.md | 130 + .../node_modules/has-value/index.js | 19 + .../has-value/node_modules/isobject/LICENSE | 21 + .../has-value/node_modules/isobject/README.md | 112 + .../has-value/node_modules/isobject/index.js | 14 + .../node_modules/isobject/package.json | 67 + .../node_modules/has-value/package.json | 81 + .../node_modules/has-values/LICENSE | 21 + .../node_modules/has-values/README.md | 114 + .../node_modules/has-values/index.js | 36 + .../node_modules/has-values/package.json | 75 + node_modules/unset-value/package.json | 71 + node_modules/unzip-response/index.js | 36 + node_modules/unzip-response/license | 21 + node_modules/unzip-response/package.json | 49 + node_modules/unzip-response/readme.md | 29 + node_modules/upath/LICENSE | 22 + node_modules/upath/build/code/upath.js | 171 + node_modules/upath/package.json | 60 + node_modules/upath/readme.md | 335 + node_modules/upath/upath.d.ts | 239 + node_modules/update-notifier/check.js | 22 + node_modules/update-notifier/index.js | 155 + node_modules/update-notifier/license | 9 + .../update-notifier/node_modules/.bin/is-ci | 1 + node_modules/update-notifier/package.json | 55 + node_modules/update-notifier/readme.md | 193 + node_modules/urix/.jshintrc | 42 + node_modules/urix/LICENSE | 21 + node_modules/urix/index.js | 17 + node_modules/urix/package.json | 25 + node_modules/urix/readme.md | 46 + node_modules/urix/test/index.js | 43 + node_modules/url-parse-lax/index.js | 14 + node_modules/url-parse-lax/license | 21 + 
node_modules/url-parse-lax/package.json | 41 + node_modules/url-parse-lax/readme.md | 100 + node_modules/use/LICENSE | 21 + node_modules/use/README.md | 90 + node_modules/use/index.js | 155 + node_modules/use/package.json | 66 + node_modules/util-deprecate/History.md | 16 + node_modules/util-deprecate/LICENSE | 24 + node_modules/util-deprecate/README.md | 53 + node_modules/util-deprecate/browser.js | 67 + node_modules/util-deprecate/node.js | 6 + node_modules/util-deprecate/package.json | 27 + node_modules/utils-merge/.npmignore | 9 + node_modules/utils-merge/LICENSE | 20 + node_modules/utils-merge/README.md | 34 + node_modules/utils-merge/index.js | 23 + node_modules/utils-merge/package.json | 40 + node_modules/vary/HISTORY.md | 39 + node_modules/vary/LICENSE | 22 + node_modules/vary/README.md | 101 + node_modules/vary/index.js | 149 + node_modules/vary/package.json | 43 + node_modules/which/CHANGELOG.md | 152 + node_modules/which/LICENSE | 15 + node_modules/which/README.md | 51 + node_modules/which/bin/which | 52 + node_modules/which/package.json | 30 + node_modules/which/which.js | 135 + node_modules/wide-align/LICENSE | 14 + node_modules/wide-align/README.md | 47 + node_modules/wide-align/align.js | 65 + node_modules/wide-align/package.json | 33 + node_modules/widest-line/index.js | 8 + node_modules/widest-line/license | 9 + node_modules/widest-line/package.json | 54 + node_modules/widest-line/readme.md | 34 + node_modules/wrappy/LICENSE | 15 + node_modules/wrappy/README.md | 36 + node_modules/wrappy/package.json | 29 + node_modules/wrappy/wrappy.js | 33 + node_modules/write-file-atomic/CHANGELOG.md | 25 + node_modules/write-file-atomic/LICENSE | 6 + node_modules/write-file-atomic/README.md | 56 + node_modules/write-file-atomic/index.js | 238 + node_modules/write-file-atomic/package.json | 41 + node_modules/xdg-basedir/index.js | 28 + node_modules/xdg-basedir/license | 21 + node_modules/xdg-basedir/package.json | 40 + node_modules/xdg-basedir/readme.md | 60 + node_modules/yallist/LICENSE | 15 + node_modules/yallist/README.md | 204 + node_modules/yallist/iterator.js | 8 + node_modules/yallist/package.json | 29 + node_modules/yallist/yallist.js | 376 + nodemon.json | 5 + package.json | 16 + src/app.js | 19 + src/routes.js | 9 + src/server.js | 3 + yarn.lock | 2073 +++++ 2657 files changed, 299780 insertions(+) create mode 120000 node_modules/.bin/atob create mode 120000 node_modules/.bin/detect-libc create mode 120000 node_modules/.bin/is-ci create mode 120000 node_modules/.bin/mime create mode 120000 node_modules/.bin/mkdirp create mode 120000 node_modules/.bin/needle create mode 120000 node_modules/.bin/node-pre-gyp create mode 120000 node_modules/.bin/nodemon create mode 120000 node_modules/.bin/nodetouch create mode 120000 node_modules/.bin/nopt create mode 120000 node_modules/.bin/rc create mode 120000 node_modules/.bin/rimraf create mode 120000 node_modules/.bin/semver create mode 120000 node_modules/.bin/sucrase create mode 120000 node_modules/.bin/sucrase-node create mode 120000 node_modules/.bin/which create mode 100644 node_modules/.yarn-integrity create mode 100644 node_modules/abbrev/LICENSE create mode 100644 node_modules/abbrev/README.md create mode 100644 node_modules/abbrev/abbrev.js create mode 100644 node_modules/abbrev/package.json create mode 100644 node_modules/accepts/HISTORY.md create mode 100644 node_modules/accepts/LICENSE create mode 100644 node_modules/accepts/README.md create mode 100644 node_modules/accepts/index.js create mode 100644 
node_modules/accepts/package.json create mode 100644 node_modules/ansi-align/CHANGELOG.md create mode 100644 node_modules/ansi-align/LICENSE create mode 100644 node_modules/ansi-align/README.md create mode 100644 node_modules/ansi-align/index.js create mode 100644 node_modules/ansi-align/package.json create mode 100644 node_modules/ansi-regex/index.js create mode 100644 node_modules/ansi-regex/license create mode 100644 node_modules/ansi-regex/package.json create mode 100644 node_modules/ansi-regex/readme.md create mode 100644 node_modules/ansi-styles/index.js create mode 100644 node_modules/ansi-styles/license create mode 100644 node_modules/ansi-styles/package.json create mode 100644 node_modules/ansi-styles/readme.md create mode 100644 node_modules/any-promise/.jshintrc create mode 100644 node_modules/any-promise/.npmignore create mode 100644 node_modules/any-promise/LICENSE create mode 100644 node_modules/any-promise/README.md create mode 100644 node_modules/any-promise/implementation.d.ts create mode 100644 node_modules/any-promise/implementation.js create mode 100644 node_modules/any-promise/index.d.ts create mode 100644 node_modules/any-promise/index.js create mode 100644 node_modules/any-promise/loader.js create mode 100644 node_modules/any-promise/optional.js create mode 100644 node_modules/any-promise/package.json create mode 100644 node_modules/any-promise/register-shim.js create mode 100644 node_modules/any-promise/register.d.ts create mode 100644 node_modules/any-promise/register.js create mode 100644 node_modules/any-promise/register/bluebird.d.ts create mode 100644 node_modules/any-promise/register/bluebird.js create mode 100644 node_modules/any-promise/register/es6-promise.d.ts create mode 100644 node_modules/any-promise/register/es6-promise.js create mode 100644 node_modules/any-promise/register/lie.d.ts create mode 100644 node_modules/any-promise/register/lie.js create mode 100644 node_modules/any-promise/register/native-promise-only.d.ts create mode 100644 node_modules/any-promise/register/native-promise-only.js create mode 100644 node_modules/any-promise/register/pinkie.d.ts create mode 100644 node_modules/any-promise/register/pinkie.js create mode 100644 node_modules/any-promise/register/promise.d.ts create mode 100644 node_modules/any-promise/register/promise.js create mode 100644 node_modules/any-promise/register/q.d.ts create mode 100644 node_modules/any-promise/register/q.js create mode 100644 node_modules/any-promise/register/rsvp.d.ts create mode 100644 node_modules/any-promise/register/rsvp.js create mode 100644 node_modules/any-promise/register/vow.d.ts create mode 100644 node_modules/any-promise/register/vow.js create mode 100644 node_modules/any-promise/register/when.d.ts create mode 100644 node_modules/any-promise/register/when.js create mode 100644 node_modules/anymatch/LICENSE create mode 100644 node_modules/anymatch/README.md create mode 100644 node_modules/anymatch/index.js create mode 100644 node_modules/anymatch/node_modules/normalize-path/LICENSE create mode 100644 node_modules/anymatch/node_modules/normalize-path/README.md create mode 100644 node_modules/anymatch/node_modules/normalize-path/index.js create mode 100644 node_modules/anymatch/node_modules/normalize-path/package.json create mode 100644 node_modules/anymatch/package.json create mode 100644 node_modules/aproba/LICENSE create mode 100644 node_modules/aproba/README.md create mode 100644 node_modules/aproba/index.js create mode 100644 node_modules/aproba/package.json create mode 100644 
node_modules/are-we-there-yet/CHANGES.md create mode 100644 node_modules/are-we-there-yet/LICENSE create mode 100644 node_modules/are-we-there-yet/README.md create mode 100644 node_modules/are-we-there-yet/index.js create mode 100644 node_modules/are-we-there-yet/package.json create mode 100644 node_modules/are-we-there-yet/tracker-base.js create mode 100644 node_modules/are-we-there-yet/tracker-group.js create mode 100644 node_modules/are-we-there-yet/tracker-stream.js create mode 100644 node_modules/are-we-there-yet/tracker.js create mode 100755 node_modules/arr-diff/LICENSE create mode 100644 node_modules/arr-diff/README.md create mode 100644 node_modules/arr-diff/index.js create mode 100644 node_modules/arr-diff/package.json create mode 100755 node_modules/arr-flatten/LICENSE create mode 100755 node_modules/arr-flatten/README.md create mode 100644 node_modules/arr-flatten/index.js create mode 100644 node_modules/arr-flatten/package.json create mode 100644 node_modules/arr-union/LICENSE create mode 100644 node_modules/arr-union/README.md create mode 100644 node_modules/arr-union/index.js create mode 100644 node_modules/arr-union/package.json create mode 100644 node_modules/array-flatten/LICENSE create mode 100644 node_modules/array-flatten/README.md create mode 100644 node_modules/array-flatten/array-flatten.js create mode 100644 node_modules/array-flatten/package.json create mode 100755 node_modules/array-unique/LICENSE create mode 100755 node_modules/array-unique/README.md create mode 100644 node_modules/array-unique/index.js create mode 100644 node_modules/array-unique/package.json create mode 100644 node_modules/assign-symbols/LICENSE create mode 100644 node_modules/assign-symbols/README.md create mode 100644 node_modules/assign-symbols/index.js create mode 100644 node_modules/assign-symbols/package.json create mode 100644 node_modules/async-each/README.md create mode 100644 node_modules/async-each/index.js create mode 100644 node_modules/async-each/package.json create mode 100644 node_modules/atob/LICENSE create mode 100644 node_modules/atob/LICENSE.DOCS create mode 100644 node_modules/atob/README.md create mode 100755 node_modules/atob/bin/atob.js create mode 100644 node_modules/atob/bower.json create mode 100644 node_modules/atob/browser-atob.js create mode 100644 node_modules/atob/node-atob.js create mode 100644 node_modules/atob/package.json create mode 100644 node_modules/atob/test.js create mode 100644 node_modules/balanced-match/.npmignore create mode 100644 node_modules/balanced-match/LICENSE.md create mode 100644 node_modules/balanced-match/README.md create mode 100644 node_modules/balanced-match/index.js create mode 100644 node_modules/balanced-match/package.json create mode 100644 node_modules/base/LICENSE create mode 100644 node_modules/base/README.md create mode 100644 node_modules/base/index.js create mode 100644 node_modules/base/node_modules/define-property/LICENSE create mode 100644 node_modules/base/node_modules/define-property/README.md create mode 100644 node_modules/base/node_modules/define-property/index.js create mode 100644 node_modules/base/node_modules/define-property/package.json create mode 100644 node_modules/base/package.json create mode 100644 node_modules/binary-extensions/binary-extensions.json create mode 100644 node_modules/binary-extensions/license create mode 100644 node_modules/binary-extensions/package.json create mode 100644 node_modules/binary-extensions/readme.md create mode 100644 node_modules/body-parser/HISTORY.md create mode 100644 
node_modules/body-parser/LICENSE create mode 100644 node_modules/body-parser/README.md create mode 100644 node_modules/body-parser/index.js create mode 100644 node_modules/body-parser/lib/read.js create mode 100644 node_modules/body-parser/lib/types/json.js create mode 100644 node_modules/body-parser/lib/types/raw.js create mode 100644 node_modules/body-parser/lib/types/text.js create mode 100644 node_modules/body-parser/lib/types/urlencoded.js create mode 100644 node_modules/body-parser/package.json create mode 100644 node_modules/boxen/index.js create mode 100644 node_modules/boxen/license create mode 100644 node_modules/boxen/package.json create mode 100644 node_modules/boxen/readme.md create mode 100644 node_modules/brace-expansion/LICENSE create mode 100644 node_modules/brace-expansion/README.md create mode 100644 node_modules/brace-expansion/index.js create mode 100644 node_modules/brace-expansion/package.json create mode 100644 node_modules/braces/LICENSE create mode 100644 node_modules/braces/README.md create mode 100644 node_modules/braces/index.js create mode 100644 node_modules/braces/lib/braces.js create mode 100644 node_modules/braces/lib/compilers.js create mode 100644 node_modules/braces/lib/parsers.js create mode 100644 node_modules/braces/lib/utils.js create mode 100644 node_modules/braces/package.json create mode 100644 node_modules/bytes/History.md create mode 100644 node_modules/bytes/LICENSE create mode 100644 node_modules/bytes/Readme.md create mode 100644 node_modules/bytes/index.js create mode 100644 node_modules/bytes/package.json create mode 100644 node_modules/cache-base/LICENSE create mode 100644 node_modules/cache-base/README.md create mode 100644 node_modules/cache-base/index.js create mode 100644 node_modules/cache-base/package.json create mode 100644 node_modules/camelcase/index.js create mode 100644 node_modules/camelcase/license create mode 100644 node_modules/camelcase/package.json create mode 100644 node_modules/camelcase/readme.md create mode 100644 node_modules/capture-stack-trace/index.js create mode 100644 node_modules/capture-stack-trace/license create mode 100644 node_modules/capture-stack-trace/package.json create mode 100644 node_modules/capture-stack-trace/readme.md create mode 100644 node_modules/chalk/index.js create mode 100644 node_modules/chalk/index.js.flow create mode 100644 node_modules/chalk/license create mode 100644 node_modules/chalk/package.json create mode 100644 node_modules/chalk/readme.md create mode 100644 node_modules/chalk/templates.js create mode 100644 node_modules/chalk/types/index.d.ts create mode 100644 node_modules/chokidar/CHANGELOG.md create mode 100644 node_modules/chokidar/README.md create mode 100644 node_modules/chokidar/index.js create mode 100644 node_modules/chokidar/lib/fsevents-handler.js create mode 100644 node_modules/chokidar/lib/nodefs-handler.js create mode 100644 node_modules/chokidar/package.json create mode 100644 node_modules/chokidar/types/index.d.ts create mode 100644 node_modules/chownr/LICENSE create mode 100644 node_modules/chownr/README.md create mode 100644 node_modules/chownr/chownr.js create mode 100644 node_modules/chownr/package.json create mode 100644 node_modules/ci-info/CHANGELOG.md create mode 100644 node_modules/ci-info/LICENSE create mode 100644 node_modules/ci-info/README.md create mode 100644 node_modules/ci-info/index.js create mode 100644 node_modules/ci-info/package.json create mode 100644 node_modules/ci-info/vendors.json create mode 100644 node_modules/class-utils/LICENSE 
create mode 100644 node_modules/class-utils/README.md create mode 100644 node_modules/class-utils/index.js create mode 100644 node_modules/class-utils/package.json create mode 100644 node_modules/cli-boxes/boxes.json create mode 100644 node_modules/cli-boxes/index.js create mode 100644 node_modules/cli-boxes/license create mode 100644 node_modules/cli-boxes/package.json create mode 100644 node_modules/cli-boxes/readme.md create mode 100644 node_modules/code-point-at/index.js create mode 100644 node_modules/code-point-at/license create mode 100644 node_modules/code-point-at/package.json create mode 100644 node_modules/code-point-at/readme.md create mode 100644 node_modules/collection-visit/LICENSE create mode 100644 node_modules/collection-visit/README.md create mode 100644 node_modules/collection-visit/index.js create mode 100644 node_modules/collection-visit/package.json create mode 100644 node_modules/color-convert/CHANGELOG.md create mode 100644 node_modules/color-convert/LICENSE create mode 100644 node_modules/color-convert/README.md create mode 100644 node_modules/color-convert/conversions.js create mode 100644 node_modules/color-convert/index.js create mode 100644 node_modules/color-convert/package.json create mode 100644 node_modules/color-convert/route.js create mode 100644 node_modules/color-name/.eslintrc.json create mode 100644 node_modules/color-name/.npmignore create mode 100644 node_modules/color-name/LICENSE create mode 100644 node_modules/color-name/README.md create mode 100644 node_modules/color-name/index.js create mode 100644 node_modules/color-name/package.json create mode 100644 node_modules/color-name/test.js create mode 100644 node_modules/commander/CHANGELOG.md create mode 100644 node_modules/commander/LICENSE create mode 100644 node_modules/commander/Readme.md create mode 100644 node_modules/commander/index.js create mode 100644 node_modules/commander/package.json create mode 100644 node_modules/commander/typings/index.d.ts create mode 100644 node_modules/component-emitter/History.md create mode 100644 node_modules/component-emitter/LICENSE create mode 100644 node_modules/component-emitter/Readme.md create mode 100644 node_modules/component-emitter/index.js create mode 100644 node_modules/component-emitter/package.json create mode 100644 node_modules/concat-map/.travis.yml create mode 100644 node_modules/concat-map/LICENSE create mode 100644 node_modules/concat-map/README.markdown create mode 100644 node_modules/concat-map/example/map.js create mode 100644 node_modules/concat-map/index.js create mode 100644 node_modules/concat-map/package.json create mode 100644 node_modules/concat-map/test/map.js create mode 100644 node_modules/configstore/index.js create mode 100644 node_modules/configstore/license create mode 100644 node_modules/configstore/package.json create mode 100644 node_modules/configstore/readme.md create mode 100644 node_modules/console-control-strings/LICENSE create mode 100644 node_modules/console-control-strings/README.md create mode 100644 node_modules/console-control-strings/README.md~ create mode 100644 node_modules/console-control-strings/index.js create mode 100644 node_modules/console-control-strings/package.json create mode 100644 node_modules/content-disposition/HISTORY.md create mode 100644 node_modules/content-disposition/LICENSE create mode 100644 node_modules/content-disposition/README.md create mode 100644 node_modules/content-disposition/index.js create mode 100644 node_modules/content-disposition/package.json create mode 100644 
node_modules/content-type/HISTORY.md create mode 100644 node_modules/content-type/LICENSE create mode 100644 node_modules/content-type/README.md create mode 100644 node_modules/content-type/index.js create mode 100644 node_modules/content-type/package.json create mode 100644 node_modules/cookie-signature/.npmignore create mode 100644 node_modules/cookie-signature/History.md create mode 100644 node_modules/cookie-signature/Readme.md create mode 100644 node_modules/cookie-signature/index.js create mode 100644 node_modules/cookie-signature/package.json create mode 100644 node_modules/cookie/HISTORY.md create mode 100644 node_modules/cookie/LICENSE create mode 100644 node_modules/cookie/README.md create mode 100644 node_modules/cookie/index.js create mode 100644 node_modules/cookie/package.json create mode 100644 node_modules/copy-descriptor/LICENSE create mode 100644 node_modules/copy-descriptor/index.js create mode 100644 node_modules/copy-descriptor/package.json create mode 100644 node_modules/core-util-is/LICENSE create mode 100644 node_modules/core-util-is/README.md create mode 100644 node_modules/core-util-is/float.patch create mode 100644 node_modules/core-util-is/lib/util.js create mode 100644 node_modules/core-util-is/package.json create mode 100644 node_modules/core-util-is/test.js create mode 100644 node_modules/create-error-class/index.js create mode 100644 node_modules/create-error-class/license create mode 100644 node_modules/create-error-class/package.json create mode 100644 node_modules/create-error-class/readme.md create mode 100644 node_modules/cross-spawn/CHANGELOG.md create mode 100644 node_modules/cross-spawn/LICENSE create mode 100644 node_modules/cross-spawn/README.md create mode 100644 node_modules/cross-spawn/index.js create mode 100644 node_modules/cross-spawn/lib/enoent.js create mode 100644 node_modules/cross-spawn/lib/parse.js create mode 100644 node_modules/cross-spawn/lib/util/escapeArgument.js create mode 100644 node_modules/cross-spawn/lib/util/escapeCommand.js create mode 100644 node_modules/cross-spawn/lib/util/hasEmptyArgumentBug.js create mode 100644 node_modules/cross-spawn/lib/util/readShebang.js create mode 100644 node_modules/cross-spawn/lib/util/resolveCommand.js create mode 120000 node_modules/cross-spawn/node_modules/.bin/which create mode 100644 node_modules/cross-spawn/package.json create mode 100644 node_modules/crypto-random-string/index.js create mode 100644 node_modules/crypto-random-string/license create mode 100644 node_modules/crypto-random-string/package.json create mode 100644 node_modules/crypto-random-string/readme.md create mode 100644 node_modules/debug/.coveralls.yml create mode 100644 node_modules/debug/.eslintrc create mode 100644 node_modules/debug/.npmignore create mode 100644 node_modules/debug/.travis.yml create mode 100644 node_modules/debug/CHANGELOG.md create mode 100644 node_modules/debug/LICENSE create mode 100644 node_modules/debug/Makefile create mode 100644 node_modules/debug/README.md create mode 100644 node_modules/debug/component.json create mode 100644 node_modules/debug/karma.conf.js create mode 100644 node_modules/debug/node.js create mode 100644 node_modules/debug/package.json create mode 100644 node_modules/debug/src/browser.js create mode 100644 node_modules/debug/src/debug.js create mode 100644 node_modules/debug/src/index.js create mode 100644 node_modules/debug/src/inspector-log.js create mode 100644 node_modules/debug/src/node.js create mode 100644 node_modules/decode-uri-component/index.js create mode 
100644 node_modules/decode-uri-component/license create mode 100644 node_modules/decode-uri-component/package.json create mode 100644 node_modules/decode-uri-component/readme.md create mode 100644 node_modules/deep-extend/CHANGELOG.md create mode 100644 node_modules/deep-extend/LICENSE create mode 100644 node_modules/deep-extend/README.md create mode 100644 node_modules/deep-extend/index.js create mode 100644 node_modules/deep-extend/lib/deep-extend.js create mode 100644 node_modules/deep-extend/package.json create mode 100644 node_modules/define-property/LICENSE create mode 100644 node_modules/define-property/README.md create mode 100644 node_modules/define-property/index.js create mode 100644 node_modules/define-property/node_modules/is-accessor-descriptor/LICENSE create mode 100644 node_modules/define-property/node_modules/is-accessor-descriptor/README.md create mode 100644 node_modules/define-property/node_modules/is-accessor-descriptor/index.js create mode 100644 node_modules/define-property/node_modules/is-accessor-descriptor/node_modules/kind-of/LICENSE create mode 100644 node_modules/define-property/node_modules/is-accessor-descriptor/node_modules/kind-of/README.md create mode 100644 node_modules/define-property/node_modules/is-accessor-descriptor/node_modules/kind-of/index.js create mode 100644 node_modules/define-property/node_modules/is-accessor-descriptor/node_modules/kind-of/package.json create mode 100644 node_modules/define-property/node_modules/is-accessor-descriptor/package.json create mode 100644 node_modules/define-property/node_modules/is-data-descriptor/LICENSE create mode 100644 node_modules/define-property/node_modules/is-data-descriptor/README.md create mode 100644 node_modules/define-property/node_modules/is-data-descriptor/index.js create mode 100644 node_modules/define-property/node_modules/is-data-descriptor/node_modules/kind-of/LICENSE create mode 100644 node_modules/define-property/node_modules/is-data-descriptor/node_modules/kind-of/README.md create mode 100644 node_modules/define-property/node_modules/is-data-descriptor/node_modules/kind-of/index.js create mode 100644 node_modules/define-property/node_modules/is-data-descriptor/node_modules/kind-of/package.json create mode 100644 node_modules/define-property/node_modules/is-data-descriptor/package.json create mode 100644 node_modules/define-property/node_modules/is-descriptor/LICENSE create mode 100644 node_modules/define-property/node_modules/is-descriptor/README.md create mode 100644 node_modules/define-property/node_modules/is-descriptor/index.js create mode 100644 node_modules/define-property/node_modules/is-descriptor/package.json create mode 100644 node_modules/define-property/node_modules/kind-of/LICENSE create mode 100644 node_modules/define-property/node_modules/kind-of/README.md create mode 100644 node_modules/define-property/node_modules/kind-of/index.js create mode 100644 node_modules/define-property/node_modules/kind-of/package.json create mode 100644 node_modules/define-property/package.json create mode 100644 node_modules/delegates/.npmignore create mode 100644 node_modules/delegates/History.md create mode 100644 node_modules/delegates/License create mode 100644 node_modules/delegates/Makefile create mode 100644 node_modules/delegates/Readme.md create mode 100644 node_modules/delegates/index.js create mode 100644 node_modules/delegates/package.json create mode 100644 node_modules/delegates/test/index.js create mode 100644 node_modules/depd/History.md create mode 100644 
node_modules/depd/LICENSE create mode 100644 node_modules/depd/Readme.md create mode 100644 node_modules/depd/index.js create mode 100644 node_modules/depd/lib/browser/index.js create mode 100644 node_modules/depd/lib/compat/callsite-tostring.js create mode 100644 node_modules/depd/lib/compat/event-listener-count.js create mode 100644 node_modules/depd/lib/compat/index.js create mode 100644 node_modules/depd/package.json create mode 100644 node_modules/destroy/LICENSE create mode 100644 node_modules/destroy/README.md create mode 100644 node_modules/destroy/index.js create mode 100644 node_modules/destroy/package.json create mode 100644 node_modules/detect-libc/.npmignore create mode 100644 node_modules/detect-libc/LICENSE create mode 100644 node_modules/detect-libc/README.md create mode 100755 node_modules/detect-libc/bin/detect-libc.js create mode 100644 node_modules/detect-libc/lib/detect-libc.js create mode 100644 node_modules/detect-libc/package.json create mode 100644 node_modules/dot-prop/index.js create mode 100644 node_modules/dot-prop/license create mode 100644 node_modules/dot-prop/package.json create mode 100644 node_modules/dot-prop/readme.md create mode 100644 node_modules/duplexer3/LICENSE.md create mode 100644 node_modules/duplexer3/README.md create mode 100644 node_modules/duplexer3/index.js create mode 100644 node_modules/duplexer3/package.json create mode 100644 node_modules/ee-first/LICENSE create mode 100644 node_modules/ee-first/README.md create mode 100644 node_modules/ee-first/index.js create mode 100644 node_modules/ee-first/package.json create mode 100644 node_modules/encodeurl/HISTORY.md create mode 100644 node_modules/encodeurl/LICENSE create mode 100644 node_modules/encodeurl/README.md create mode 100644 node_modules/encodeurl/index.js create mode 100644 node_modules/encodeurl/package.json create mode 100644 node_modules/escape-html/LICENSE create mode 100644 node_modules/escape-html/Readme.md create mode 100644 node_modules/escape-html/index.js create mode 100644 node_modules/escape-html/package.json create mode 100644 node_modules/escape-string-regexp/index.js create mode 100644 node_modules/escape-string-regexp/license create mode 100644 node_modules/escape-string-regexp/package.json create mode 100644 node_modules/escape-string-regexp/readme.md create mode 100644 node_modules/etag/HISTORY.md create mode 100644 node_modules/etag/LICENSE create mode 100644 node_modules/etag/README.md create mode 100644 node_modules/etag/index.js create mode 100644 node_modules/etag/package.json create mode 100644 node_modules/execa/index.js create mode 100644 node_modules/execa/lib/errname.js create mode 100644 node_modules/execa/lib/stdio.js create mode 100644 node_modules/execa/license create mode 100644 node_modules/execa/package.json create mode 100644 node_modules/execa/readme.md create mode 100644 node_modules/expand-brackets/LICENSE create mode 100644 node_modules/expand-brackets/README.md create mode 100644 node_modules/expand-brackets/changelog.md create mode 100644 node_modules/expand-brackets/index.js create mode 100644 node_modules/expand-brackets/lib/compilers.js create mode 100644 node_modules/expand-brackets/lib/parsers.js create mode 100644 node_modules/expand-brackets/lib/utils.js create mode 100644 node_modules/expand-brackets/package.json create mode 100644 node_modules/express/History.md create mode 100644 node_modules/express/LICENSE create mode 100644 node_modules/express/Readme.md create mode 100644 node_modules/express/index.js create mode 100644 
node_modules/express/lib/application.js create mode 100644 node_modules/express/lib/express.js create mode 100644 node_modules/express/lib/middleware/init.js create mode 100644 node_modules/express/lib/middleware/query.js create mode 100644 node_modules/express/lib/request.js create mode 100644 node_modules/express/lib/response.js create mode 100644 node_modules/express/lib/router/index.js create mode 100644 node_modules/express/lib/router/layer.js create mode 100644 node_modules/express/lib/router/route.js create mode 100644 node_modules/express/lib/utils.js create mode 100644 node_modules/express/lib/view.js create mode 100644 node_modules/express/package.json create mode 100644 node_modules/extend-shallow/LICENSE create mode 100644 node_modules/extend-shallow/README.md create mode 100644 node_modules/extend-shallow/index.js create mode 100644 node_modules/extend-shallow/package.json create mode 100644 node_modules/extglob/LICENSE create mode 100644 node_modules/extglob/README.md create mode 100644 node_modules/extglob/changelog.md create mode 100644 node_modules/extglob/index.js create mode 100644 node_modules/extglob/lib/compilers.js create mode 100644 node_modules/extglob/lib/extglob.js create mode 100644 node_modules/extglob/lib/parsers.js create mode 100644 node_modules/extglob/lib/utils.js create mode 100644 node_modules/extglob/node_modules/define-property/LICENSE create mode 100644 node_modules/extglob/node_modules/define-property/README.md create mode 100644 node_modules/extglob/node_modules/define-property/index.js create mode 100644 node_modules/extglob/node_modules/define-property/package.json create mode 100644 node_modules/extglob/package.json create mode 100644 node_modules/fill-range/LICENSE create mode 100644 node_modules/fill-range/README.md create mode 100644 node_modules/fill-range/index.js create mode 100644 node_modules/fill-range/package.json create mode 100644 node_modules/finalhandler/HISTORY.md create mode 100644 node_modules/finalhandler/LICENSE create mode 100644 node_modules/finalhandler/README.md create mode 100644 node_modules/finalhandler/index.js create mode 100644 node_modules/finalhandler/package.json create mode 100644 node_modules/for-in/LICENSE create mode 100644 node_modules/for-in/README.md create mode 100644 node_modules/for-in/index.js create mode 100644 node_modules/for-in/package.json create mode 100644 node_modules/forwarded/HISTORY.md create mode 100644 node_modules/forwarded/LICENSE create mode 100644 node_modules/forwarded/README.md create mode 100644 node_modules/forwarded/index.js create mode 100644 node_modules/forwarded/package.json create mode 100644 node_modules/fragment-cache/LICENSE create mode 100644 node_modules/fragment-cache/README.md create mode 100644 node_modules/fragment-cache/index.js create mode 100644 node_modules/fragment-cache/package.json create mode 100644 node_modules/fresh/HISTORY.md create mode 100644 node_modules/fresh/LICENSE create mode 100644 node_modules/fresh/README.md create mode 100644 node_modules/fresh/index.js create mode 100644 node_modules/fresh/package.json create mode 100644 node_modules/fs-minipass/LICENSE create mode 100644 node_modules/fs-minipass/README.md create mode 100644 node_modules/fs-minipass/index.js create mode 100644 node_modules/fs-minipass/package.json create mode 100644 node_modules/fs.realpath/LICENSE create mode 100644 node_modules/fs.realpath/README.md create mode 100644 node_modules/fs.realpath/index.js create mode 100644 node_modules/fs.realpath/old.js create mode 100644 
node_modules/fs.realpath/package.json create mode 100644 node_modules/fsevents/.travis.yml create mode 100644 node_modules/fsevents/ISSUE_TEMPLATE.md create mode 100644 node_modules/fsevents/LICENSE create mode 100644 node_modules/fsevents/Readme.md create mode 100644 node_modules/fsevents/binding.gyp create mode 100644 node_modules/fsevents/fsevents.cc create mode 100644 node_modules/fsevents/fsevents.js create mode 100644 node_modules/fsevents/install.js create mode 100755 node_modules/fsevents/lib/binding/Release/node-v72-darwin-x64/fse.node create mode 120000 node_modules/fsevents/node_modules/.bin/node-pre-gyp create mode 100644 node_modules/fsevents/node_modules/abbrev/LICENSE create mode 100644 node_modules/fsevents/node_modules/abbrev/README.md create mode 100644 node_modules/fsevents/node_modules/abbrev/abbrev.js create mode 100644 node_modules/fsevents/node_modules/abbrev/package.json create mode 100644 node_modules/fsevents/node_modules/ansi-regex/index.js create mode 100644 node_modules/fsevents/node_modules/ansi-regex/license create mode 100644 node_modules/fsevents/node_modules/ansi-regex/package.json create mode 100644 node_modules/fsevents/node_modules/ansi-regex/readme.md create mode 100644 node_modules/fsevents/node_modules/aproba/LICENSE create mode 100644 node_modules/fsevents/node_modules/aproba/README.md create mode 100644 node_modules/fsevents/node_modules/aproba/index.js create mode 100644 node_modules/fsevents/node_modules/aproba/package.json create mode 100644 node_modules/fsevents/node_modules/are-we-there-yet/CHANGES.md create mode 100644 node_modules/fsevents/node_modules/are-we-there-yet/LICENSE create mode 100644 node_modules/fsevents/node_modules/are-we-there-yet/README.md create mode 100644 node_modules/fsevents/node_modules/are-we-there-yet/index.js create mode 100644 node_modules/fsevents/node_modules/are-we-there-yet/package.json create mode 100644 node_modules/fsevents/node_modules/are-we-there-yet/tracker-base.js create mode 100644 node_modules/fsevents/node_modules/are-we-there-yet/tracker-group.js create mode 100644 node_modules/fsevents/node_modules/are-we-there-yet/tracker-stream.js create mode 100644 node_modules/fsevents/node_modules/are-we-there-yet/tracker.js create mode 100644 node_modules/fsevents/node_modules/balanced-match/.npmignore create mode 100644 node_modules/fsevents/node_modules/balanced-match/LICENSE.md create mode 100644 node_modules/fsevents/node_modules/balanced-match/README.md create mode 100644 node_modules/fsevents/node_modules/balanced-match/index.js create mode 100644 node_modules/fsevents/node_modules/balanced-match/package.json create mode 100644 node_modules/fsevents/node_modules/brace-expansion/LICENSE create mode 100644 node_modules/fsevents/node_modules/brace-expansion/README.md create mode 100644 node_modules/fsevents/node_modules/brace-expansion/index.js create mode 100644 node_modules/fsevents/node_modules/brace-expansion/package.json create mode 100644 node_modules/fsevents/node_modules/chownr/LICENSE create mode 100644 node_modules/fsevents/node_modules/chownr/README.md create mode 100644 node_modules/fsevents/node_modules/chownr/chownr.js create mode 100644 node_modules/fsevents/node_modules/chownr/package.json create mode 100644 node_modules/fsevents/node_modules/code-point-at/index.js create mode 100644 node_modules/fsevents/node_modules/code-point-at/license create mode 100644 node_modules/fsevents/node_modules/code-point-at/package.json create mode 100644 
node_modules/fsevents/node_modules/code-point-at/readme.md create mode 100644 node_modules/fsevents/node_modules/concat-map/.travis.yml create mode 100644 node_modules/fsevents/node_modules/concat-map/LICENSE create mode 100644 node_modules/fsevents/node_modules/concat-map/README.markdown create mode 100644 node_modules/fsevents/node_modules/concat-map/example/map.js create mode 100644 node_modules/fsevents/node_modules/concat-map/index.js create mode 100644 node_modules/fsevents/node_modules/concat-map/package.json create mode 100644 node_modules/fsevents/node_modules/concat-map/test/map.js create mode 100644 node_modules/fsevents/node_modules/console-control-strings/LICENSE create mode 100644 node_modules/fsevents/node_modules/console-control-strings/README.md create mode 100644 node_modules/fsevents/node_modules/console-control-strings/README.md~ create mode 100644 node_modules/fsevents/node_modules/console-control-strings/index.js create mode 100644 node_modules/fsevents/node_modules/console-control-strings/package.json create mode 100644 node_modules/fsevents/node_modules/core-util-is/LICENSE create mode 100644 node_modules/fsevents/node_modules/core-util-is/README.md create mode 100644 node_modules/fsevents/node_modules/core-util-is/float.patch create mode 100644 node_modules/fsevents/node_modules/core-util-is/lib/util.js create mode 100644 node_modules/fsevents/node_modules/core-util-is/package.json create mode 100644 node_modules/fsevents/node_modules/core-util-is/test.js create mode 100644 node_modules/fsevents/node_modules/debug/CHANGELOG.md create mode 100644 node_modules/fsevents/node_modules/debug/LICENSE create mode 100644 node_modules/fsevents/node_modules/debug/README.md create mode 100644 node_modules/fsevents/node_modules/debug/dist/debug.js create mode 100644 node_modules/fsevents/node_modules/debug/package.json create mode 100644 node_modules/fsevents/node_modules/debug/src/browser.js create mode 100644 node_modules/fsevents/node_modules/debug/src/common.js create mode 100644 node_modules/fsevents/node_modules/debug/src/index.js create mode 100644 node_modules/fsevents/node_modules/debug/src/node.js create mode 100644 node_modules/fsevents/node_modules/deep-extend/CHANGELOG.md create mode 100644 node_modules/fsevents/node_modules/deep-extend/LICENSE create mode 100644 node_modules/fsevents/node_modules/deep-extend/README.md create mode 100644 node_modules/fsevents/node_modules/deep-extend/index.js create mode 100644 node_modules/fsevents/node_modules/deep-extend/lib/deep-extend.js create mode 100644 node_modules/fsevents/node_modules/deep-extend/package.json create mode 100644 node_modules/fsevents/node_modules/delegates/.npmignore create mode 100644 node_modules/fsevents/node_modules/delegates/History.md create mode 100644 node_modules/fsevents/node_modules/delegates/License create mode 100644 node_modules/fsevents/node_modules/delegates/Makefile create mode 100644 node_modules/fsevents/node_modules/delegates/Readme.md create mode 100644 node_modules/fsevents/node_modules/delegates/index.js create mode 100644 node_modules/fsevents/node_modules/delegates/package.json create mode 100644 node_modules/fsevents/node_modules/delegates/test/index.js create mode 100644 node_modules/fsevents/node_modules/detect-libc/.npmignore create mode 100644 node_modules/fsevents/node_modules/detect-libc/LICENSE create mode 100644 node_modules/fsevents/node_modules/detect-libc/README.md create mode 100755 node_modules/fsevents/node_modules/detect-libc/bin/detect-libc.js create mode 100644 
node_modules/fsevents/node_modules/detect-libc/lib/detect-libc.js create mode 100644 node_modules/fsevents/node_modules/detect-libc/package.json create mode 100644 node_modules/fsevents/node_modules/fs-minipass/LICENSE create mode 100644 node_modules/fsevents/node_modules/fs-minipass/README.md create mode 100644 node_modules/fsevents/node_modules/fs-minipass/index.js create mode 100644 node_modules/fsevents/node_modules/fs-minipass/package.json create mode 100644 node_modules/fsevents/node_modules/fs.realpath/LICENSE create mode 100644 node_modules/fsevents/node_modules/fs.realpath/README.md create mode 100644 node_modules/fsevents/node_modules/fs.realpath/index.js create mode 100644 node_modules/fsevents/node_modules/fs.realpath/old.js create mode 100644 node_modules/fsevents/node_modules/fs.realpath/package.json create mode 100644 node_modules/fsevents/node_modules/gauge/CHANGELOG.md create mode 100644 node_modules/fsevents/node_modules/gauge/LICENSE create mode 100644 node_modules/fsevents/node_modules/gauge/README.md create mode 100644 node_modules/fsevents/node_modules/gauge/base-theme.js create mode 100644 node_modules/fsevents/node_modules/gauge/error.js create mode 100644 node_modules/fsevents/node_modules/gauge/has-color.js create mode 100644 node_modules/fsevents/node_modules/gauge/index.js create mode 100644 node_modules/fsevents/node_modules/gauge/package.json create mode 100644 node_modules/fsevents/node_modules/gauge/plumbing.js create mode 100644 node_modules/fsevents/node_modules/gauge/process.js create mode 100644 node_modules/fsevents/node_modules/gauge/progress-bar.js create mode 100644 node_modules/fsevents/node_modules/gauge/render-template.js create mode 100644 node_modules/fsevents/node_modules/gauge/set-immediate.js create mode 100644 node_modules/fsevents/node_modules/gauge/set-interval.js create mode 100644 node_modules/fsevents/node_modules/gauge/spin.js create mode 100644 node_modules/fsevents/node_modules/gauge/template-item.js create mode 100644 node_modules/fsevents/node_modules/gauge/theme-set.js create mode 100644 node_modules/fsevents/node_modules/gauge/themes.js create mode 100644 node_modules/fsevents/node_modules/gauge/wide-truncate.js create mode 100644 node_modules/fsevents/node_modules/glob/LICENSE create mode 100644 node_modules/fsevents/node_modules/glob/README.md create mode 100644 node_modules/fsevents/node_modules/glob/changelog.md create mode 100644 node_modules/fsevents/node_modules/glob/common.js create mode 100644 node_modules/fsevents/node_modules/glob/glob.js create mode 100644 node_modules/fsevents/node_modules/glob/package.json create mode 100644 node_modules/fsevents/node_modules/glob/sync.js create mode 100644 node_modules/fsevents/node_modules/has-unicode/LICENSE create mode 100644 node_modules/fsevents/node_modules/has-unicode/README.md create mode 100644 node_modules/fsevents/node_modules/has-unicode/index.js create mode 100644 node_modules/fsevents/node_modules/has-unicode/package.json create mode 100644 node_modules/fsevents/node_modules/iconv-lite/Changelog.md create mode 100644 node_modules/fsevents/node_modules/iconv-lite/LICENSE create mode 100644 node_modules/fsevents/node_modules/iconv-lite/README.md create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/dbcs-codec.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/dbcs-data.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/index.js create mode 100644 
node_modules/fsevents/node_modules/iconv-lite/encodings/internal.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/sbcs-codec.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/sbcs-data-generated.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/sbcs-data.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/tables/big5-added.json create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/tables/cp936.json create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/tables/cp949.json create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/tables/cp950.json create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/tables/eucjp.json create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/tables/gb18030-ranges.json create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/tables/gbk-added.json create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/tables/shiftjis.json create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/utf16.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/encodings/utf7.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/lib/bom-handling.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/lib/extend-node.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/lib/index.d.ts create mode 100644 node_modules/fsevents/node_modules/iconv-lite/lib/index.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/lib/streams.js create mode 100644 node_modules/fsevents/node_modules/iconv-lite/package.json create mode 100644 node_modules/fsevents/node_modules/ignore-walk/LICENSE create mode 100644 node_modules/fsevents/node_modules/ignore-walk/README.md create mode 100644 node_modules/fsevents/node_modules/ignore-walk/index.js create mode 100644 node_modules/fsevents/node_modules/ignore-walk/package.json create mode 100644 node_modules/fsevents/node_modules/inflight/LICENSE create mode 100644 node_modules/fsevents/node_modules/inflight/README.md create mode 100644 node_modules/fsevents/node_modules/inflight/inflight.js create mode 100644 node_modules/fsevents/node_modules/inflight/package.json create mode 100644 node_modules/fsevents/node_modules/inherits/LICENSE create mode 100644 node_modules/fsevents/node_modules/inherits/README.md create mode 100644 node_modules/fsevents/node_modules/inherits/inherits.js create mode 100644 node_modules/fsevents/node_modules/inherits/inherits_browser.js create mode 100644 node_modules/fsevents/node_modules/inherits/package.json create mode 100644 node_modules/fsevents/node_modules/ini/LICENSE create mode 100644 node_modules/fsevents/node_modules/ini/README.md create mode 100644 node_modules/fsevents/node_modules/ini/ini.js create mode 100644 node_modules/fsevents/node_modules/ini/package.json create mode 100644 node_modules/fsevents/node_modules/is-fullwidth-code-point/index.js create mode 100644 node_modules/fsevents/node_modules/is-fullwidth-code-point/license create mode 100644 node_modules/fsevents/node_modules/is-fullwidth-code-point/package.json create mode 100644 node_modules/fsevents/node_modules/is-fullwidth-code-point/readme.md create mode 100644 node_modules/fsevents/node_modules/isarray/.npmignore create mode 100644 node_modules/fsevents/node_modules/isarray/.travis.yml create mode 100644 
node_modules/fsevents/node_modules/isarray/Makefile create mode 100644 node_modules/fsevents/node_modules/isarray/README.md create mode 100644 node_modules/fsevents/node_modules/isarray/component.json create mode 100644 node_modules/fsevents/node_modules/isarray/index.js create mode 100644 node_modules/fsevents/node_modules/isarray/package.json create mode 100644 node_modules/fsevents/node_modules/isarray/test.js create mode 100644 node_modules/fsevents/node_modules/minimatch/LICENSE create mode 100644 node_modules/fsevents/node_modules/minimatch/README.md create mode 100644 node_modules/fsevents/node_modules/minimatch/minimatch.js create mode 100644 node_modules/fsevents/node_modules/minimatch/package.json create mode 100644 node_modules/fsevents/node_modules/minimist/.travis.yml create mode 100644 node_modules/fsevents/node_modules/minimist/LICENSE create mode 100644 node_modules/fsevents/node_modules/minimist/example/parse.js create mode 100644 node_modules/fsevents/node_modules/minimist/index.js create mode 100644 node_modules/fsevents/node_modules/minimist/package.json create mode 100644 node_modules/fsevents/node_modules/minimist/readme.markdown create mode 100644 node_modules/fsevents/node_modules/minimist/test/dash.js create mode 100644 node_modules/fsevents/node_modules/minimist/test/default_bool.js create mode 100644 node_modules/fsevents/node_modules/minimist/test/dotted.js create mode 100644 node_modules/fsevents/node_modules/minimist/test/long.js create mode 100644 node_modules/fsevents/node_modules/minimist/test/parse.js create mode 100644 node_modules/fsevents/node_modules/minimist/test/parse_modified.js create mode 100644 node_modules/fsevents/node_modules/minimist/test/short.js create mode 100644 node_modules/fsevents/node_modules/minimist/test/whitespace.js create mode 100644 node_modules/fsevents/node_modules/minipass/LICENSE create mode 100644 node_modules/fsevents/node_modules/minipass/README.md create mode 100644 node_modules/fsevents/node_modules/minipass/index.js create mode 100644 node_modules/fsevents/node_modules/minipass/package.json create mode 100644 node_modules/fsevents/node_modules/minizlib/LICENSE create mode 100644 node_modules/fsevents/node_modules/minizlib/README.md create mode 100644 node_modules/fsevents/node_modules/minizlib/constants.js create mode 100644 node_modules/fsevents/node_modules/minizlib/index.js create mode 100644 node_modules/fsevents/node_modules/minizlib/package.json create mode 100644 node_modules/fsevents/node_modules/mkdirp/.travis.yml create mode 100644 node_modules/fsevents/node_modules/mkdirp/LICENSE create mode 100755 node_modules/fsevents/node_modules/mkdirp/bin/cmd.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/bin/usage.txt create mode 100644 node_modules/fsevents/node_modules/mkdirp/examples/pow.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/index.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/package.json create mode 100644 node_modules/fsevents/node_modules/mkdirp/readme.markdown create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/chmod.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/clobber.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/mkdirp.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/opts_fs.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/opts_fs_sync.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/perm.js create mode 100644 
node_modules/fsevents/node_modules/mkdirp/test/perm_sync.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/race.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/rel.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/return.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/return_sync.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/root.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/sync.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/umask.js create mode 100644 node_modules/fsevents/node_modules/mkdirp/test/umask_sync.js create mode 100644 node_modules/fsevents/node_modules/ms/index.js create mode 100644 node_modules/fsevents/node_modules/ms/license.md create mode 100644 node_modules/fsevents/node_modules/ms/package.json create mode 100644 node_modules/fsevents/node_modules/ms/readme.md create mode 100644 node_modules/fsevents/node_modules/needle/.npmignore create mode 100644 node_modules/fsevents/node_modules/needle/README.md create mode 100755 node_modules/fsevents/node_modules/needle/bin/needle create mode 100644 node_modules/fsevents/node_modules/needle/examples/deflated-stream.js create mode 100644 node_modules/fsevents/node_modules/needle/examples/digest-auth.js create mode 100644 node_modules/fsevents/node_modules/needle/examples/download-to-file.js create mode 100644 node_modules/fsevents/node_modules/needle/examples/multipart-stream.js create mode 100644 node_modules/fsevents/node_modules/needle/examples/parsed-stream.js create mode 100644 node_modules/fsevents/node_modules/needle/examples/parsed-stream2.js create mode 100644 node_modules/fsevents/node_modules/needle/examples/stream-events.js create mode 100644 node_modules/fsevents/node_modules/needle/examples/stream-to-file.js create mode 100644 node_modules/fsevents/node_modules/needle/examples/upload-image.js create mode 100644 node_modules/fsevents/node_modules/needle/lib/auth.js create mode 100644 node_modules/fsevents/node_modules/needle/lib/cookies.js create mode 100644 node_modules/fsevents/node_modules/needle/lib/decoder.js create mode 100644 node_modules/fsevents/node_modules/needle/lib/multipart.js create mode 100644 node_modules/fsevents/node_modules/needle/lib/needle.js create mode 100644 node_modules/fsevents/node_modules/needle/lib/parsers.js create mode 100644 node_modules/fsevents/node_modules/needle/lib/querystring.js create mode 100644 node_modules/fsevents/node_modules/needle/license.txt create mode 100644 node_modules/fsevents/node_modules/needle/package-lock.json create mode 100644 node_modules/fsevents/node_modules/needle/package.json create mode 100644 node_modules/fsevents/node_modules/needle/test/basic_auth_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/compression_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/cookies_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/decoder_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/errors_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/headers_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/helpers.js create mode 100644 node_modules/fsevents/node_modules/needle/test/keys/ssl.cert create mode 100644 node_modules/fsevents/node_modules/needle/test/keys/ssl.key create mode 100644 node_modules/fsevents/node_modules/needle/test/long_string_spec.js create mode 100644 
node_modules/fsevents/node_modules/needle/test/output_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/parsing_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/post_data_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/proxy_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/querystring_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/redirect_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/redirect_with_timeout.js create mode 100644 node_modules/fsevents/node_modules/needle/test/request_stream_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/response_stream_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/socket_pool_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/url_spec.js create mode 100644 node_modules/fsevents/node_modules/needle/test/utils/formidable.js create mode 100644 node_modules/fsevents/node_modules/needle/test/utils/proxy.js create mode 100644 node_modules/fsevents/node_modules/needle/test/utils/test.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/CHANGELOG.md create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/LICENSE create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/README.md create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/appveyor.yml create mode 100755 node_modules/fsevents/node_modules/node-pre-gyp/bin/node-pre-gyp create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/bin/node-pre-gyp.cmd create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/contributing.md create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/build.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/clean.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/configure.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/info.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/install.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/node-pre-gyp.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/package.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/pre-binding.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/publish.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/rebuild.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/reinstall.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/reveal.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/testbinary.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/testpackage.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/unpublish.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/util/abi_crosswalk.json create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/util/compile.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/util/handle_gyp_opts.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/util/napi.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/index.html create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json create mode 100644 
node_modules/fsevents/node_modules/node-pre-gyp/lib/util/s3_setup.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/lib/util/versioning.js create mode 100644 node_modules/fsevents/node_modules/node-pre-gyp/package.json create mode 100644 node_modules/fsevents/node_modules/nopt/.npmignore create mode 100644 node_modules/fsevents/node_modules/nopt/.travis.yml create mode 100644 node_modules/fsevents/node_modules/nopt/CHANGELOG.md create mode 100644 node_modules/fsevents/node_modules/nopt/LICENSE create mode 100644 node_modules/fsevents/node_modules/nopt/README.md create mode 100755 node_modules/fsevents/node_modules/nopt/bin/nopt.js create mode 100755 node_modules/fsevents/node_modules/nopt/examples/my-program.js create mode 100644 node_modules/fsevents/node_modules/nopt/lib/nopt.js create mode 100644 node_modules/fsevents/node_modules/nopt/package.json create mode 100644 node_modules/fsevents/node_modules/nopt/test/basic.js create mode 100644 node_modules/fsevents/node_modules/npm-bundled/LICENSE create mode 100644 node_modules/fsevents/node_modules/npm-bundled/README.md create mode 100644 node_modules/fsevents/node_modules/npm-bundled/index.js create mode 100644 node_modules/fsevents/node_modules/npm-bundled/package.json create mode 100644 node_modules/fsevents/node_modules/npm-packlist/LICENSE create mode 100644 node_modules/fsevents/node_modules/npm-packlist/README.md create mode 100644 node_modules/fsevents/node_modules/npm-packlist/index.js create mode 100644 node_modules/fsevents/node_modules/npm-packlist/package.json create mode 100644 node_modules/fsevents/node_modules/npmlog/CHANGELOG.md create mode 100644 node_modules/fsevents/node_modules/npmlog/LICENSE create mode 100644 node_modules/fsevents/node_modules/npmlog/README.md create mode 100644 node_modules/fsevents/node_modules/npmlog/log.js create mode 100644 node_modules/fsevents/node_modules/npmlog/package.json create mode 100644 node_modules/fsevents/node_modules/number-is-nan/index.js create mode 100644 node_modules/fsevents/node_modules/number-is-nan/license create mode 100644 node_modules/fsevents/node_modules/number-is-nan/package.json create mode 100644 node_modules/fsevents/node_modules/number-is-nan/readme.md create mode 100644 node_modules/fsevents/node_modules/object-assign/index.js create mode 100644 node_modules/fsevents/node_modules/object-assign/license create mode 100644 node_modules/fsevents/node_modules/object-assign/package.json create mode 100644 node_modules/fsevents/node_modules/object-assign/readme.md create mode 100644 node_modules/fsevents/node_modules/once/LICENSE create mode 100644 node_modules/fsevents/node_modules/once/README.md create mode 100644 node_modules/fsevents/node_modules/once/once.js create mode 100644 node_modules/fsevents/node_modules/once/package.json create mode 100644 node_modules/fsevents/node_modules/os-homedir/index.js create mode 100644 node_modules/fsevents/node_modules/os-homedir/license create mode 100644 node_modules/fsevents/node_modules/os-homedir/package.json create mode 100644 node_modules/fsevents/node_modules/os-homedir/readme.md create mode 100644 node_modules/fsevents/node_modules/os-tmpdir/index.js create mode 100644 node_modules/fsevents/node_modules/os-tmpdir/license create mode 100644 node_modules/fsevents/node_modules/os-tmpdir/package.json create mode 100644 node_modules/fsevents/node_modules/os-tmpdir/readme.md create mode 100644 node_modules/fsevents/node_modules/osenv/LICENSE create mode 100644 
node_modules/fsevents/node_modules/osenv/README.md create mode 100644 node_modules/fsevents/node_modules/osenv/osenv.js create mode 100644 node_modules/fsevents/node_modules/osenv/package.json create mode 100644 node_modules/fsevents/node_modules/path-is-absolute/index.js create mode 100644 node_modules/fsevents/node_modules/path-is-absolute/license create mode 100644 node_modules/fsevents/node_modules/path-is-absolute/package.json create mode 100644 node_modules/fsevents/node_modules/path-is-absolute/readme.md create mode 100644 node_modules/fsevents/node_modules/process-nextick-args/index.js create mode 100644 node_modules/fsevents/node_modules/process-nextick-args/license.md create mode 100644 node_modules/fsevents/node_modules/process-nextick-args/package.json create mode 100644 node_modules/fsevents/node_modules/process-nextick-args/readme.md create mode 100644 node_modules/fsevents/node_modules/rc/LICENSE.APACHE2 create mode 100644 node_modules/fsevents/node_modules/rc/LICENSE.BSD create mode 100644 node_modules/fsevents/node_modules/rc/LICENSE.MIT create mode 100644 node_modules/fsevents/node_modules/rc/README.md create mode 100644 node_modules/fsevents/node_modules/rc/browser.js create mode 100755 node_modules/fsevents/node_modules/rc/cli.js create mode 100755 node_modules/fsevents/node_modules/rc/index.js create mode 100644 node_modules/fsevents/node_modules/rc/lib/utils.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/.travis.yml create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/LICENSE create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/example/parse.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/index.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/package.json create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/readme.markdown create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/all_bool.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/bool.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dash.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/default_bool.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dotted.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/kv_short.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/long.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/num.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse_modified.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/short.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/stop_early.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/unknown.js create mode 100644 node_modules/fsevents/node_modules/rc/node_modules/minimist/test/whitespace.js create mode 100644 node_modules/fsevents/node_modules/rc/package.json create mode 100644 node_modules/fsevents/node_modules/rc/test/ini.js create mode 100644 node_modules/fsevents/node_modules/rc/test/nested-env-vars.js create mode 100644 
node_modules/fsevents/node_modules/rc/test/test.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/.travis.yml create mode 100644 node_modules/fsevents/node_modules/readable-stream/CONTRIBUTING.md create mode 100644 node_modules/fsevents/node_modules/readable-stream/GOVERNANCE.md create mode 100644 node_modules/fsevents/node_modules/readable-stream/LICENSE create mode 100644 node_modules/fsevents/node_modules/readable-stream/README.md create mode 100644 node_modules/fsevents/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md create mode 100644 node_modules/fsevents/node_modules/readable-stream/duplex-browser.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/duplex.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/lib/_stream_duplex.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/lib/_stream_passthrough.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/lib/_stream_readable.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/lib/_stream_transform.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/lib/_stream_writable.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/BufferList.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/destroy.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/stream-browser.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/stream.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/package.json create mode 100644 node_modules/fsevents/node_modules/readable-stream/passthrough.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/readable-browser.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/readable.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/transform.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/writable-browser.js create mode 100644 node_modules/fsevents/node_modules/readable-stream/writable.js create mode 100644 node_modules/fsevents/node_modules/rimraf/LICENSE create mode 100644 node_modules/fsevents/node_modules/rimraf/README.md create mode 100755 node_modules/fsevents/node_modules/rimraf/bin.js create mode 100644 node_modules/fsevents/node_modules/rimraf/package.json create mode 100644 node_modules/fsevents/node_modules/rimraf/rimraf.js create mode 100644 node_modules/fsevents/node_modules/safe-buffer/LICENSE create mode 100644 node_modules/fsevents/node_modules/safe-buffer/README.md create mode 100644 node_modules/fsevents/node_modules/safe-buffer/index.d.ts create mode 100644 node_modules/fsevents/node_modules/safe-buffer/index.js create mode 100644 node_modules/fsevents/node_modules/safe-buffer/package.json create mode 100644 node_modules/fsevents/node_modules/safer-buffer/LICENSE create mode 100644 node_modules/fsevents/node_modules/safer-buffer/Porting-Buffer.md create mode 100644 node_modules/fsevents/node_modules/safer-buffer/Readme.md create mode 100644 node_modules/fsevents/node_modules/safer-buffer/dangerous.js create mode 100644 node_modules/fsevents/node_modules/safer-buffer/package.json create mode 100644 node_modules/fsevents/node_modules/safer-buffer/safer.js create mode 100644 node_modules/fsevents/node_modules/safer-buffer/tests.js create mode 100644 
node_modules/fsevents/node_modules/sax/LICENSE create mode 100644 node_modules/fsevents/node_modules/sax/README.md create mode 100644 node_modules/fsevents/node_modules/sax/lib/sax.js create mode 100644 node_modules/fsevents/node_modules/sax/package.json create mode 100644 node_modules/fsevents/node_modules/semver/CHANGELOG.md create mode 100644 node_modules/fsevents/node_modules/semver/LICENSE create mode 100644 node_modules/fsevents/node_modules/semver/README.md create mode 100755 node_modules/fsevents/node_modules/semver/bin/semver create mode 100644 node_modules/fsevents/node_modules/semver/package.json create mode 100644 node_modules/fsevents/node_modules/semver/range.bnf create mode 100644 node_modules/fsevents/node_modules/semver/semver.js create mode 100644 node_modules/fsevents/node_modules/set-blocking/CHANGELOG.md create mode 100644 node_modules/fsevents/node_modules/set-blocking/LICENSE.txt create mode 100644 node_modules/fsevents/node_modules/set-blocking/README.md create mode 100644 node_modules/fsevents/node_modules/set-blocking/index.js create mode 100644 node_modules/fsevents/node_modules/set-blocking/package.json create mode 100644 node_modules/fsevents/node_modules/signal-exit/CHANGELOG.md create mode 100644 node_modules/fsevents/node_modules/signal-exit/LICENSE.txt create mode 100644 node_modules/fsevents/node_modules/signal-exit/README.md create mode 100644 node_modules/fsevents/node_modules/signal-exit/index.js create mode 100644 node_modules/fsevents/node_modules/signal-exit/package.json create mode 100644 node_modules/fsevents/node_modules/signal-exit/signals.js create mode 100644 node_modules/fsevents/node_modules/string-width/index.js create mode 100644 node_modules/fsevents/node_modules/string-width/license create mode 100644 node_modules/fsevents/node_modules/string-width/package.json create mode 100644 node_modules/fsevents/node_modules/string-width/readme.md create mode 100644 node_modules/fsevents/node_modules/string_decoder/.travis.yml create mode 100644 node_modules/fsevents/node_modules/string_decoder/LICENSE create mode 100644 node_modules/fsevents/node_modules/string_decoder/README.md create mode 100644 node_modules/fsevents/node_modules/string_decoder/lib/string_decoder.js create mode 100644 node_modules/fsevents/node_modules/string_decoder/package.json create mode 100644 node_modules/fsevents/node_modules/strip-ansi/index.js create mode 100644 node_modules/fsevents/node_modules/strip-ansi/license create mode 100644 node_modules/fsevents/node_modules/strip-ansi/package.json create mode 100644 node_modules/fsevents/node_modules/strip-ansi/readme.md create mode 100644 node_modules/fsevents/node_modules/strip-json-comments/index.js create mode 100644 node_modules/fsevents/node_modules/strip-json-comments/license create mode 100644 node_modules/fsevents/node_modules/strip-json-comments/package.json create mode 100644 node_modules/fsevents/node_modules/strip-json-comments/readme.md create mode 100644 node_modules/fsevents/node_modules/tar/LICENSE create mode 100644 node_modules/fsevents/node_modules/tar/README.md create mode 100644 node_modules/fsevents/node_modules/tar/index.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/buffer.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/create.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/extract.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/header.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/high-level-opt.js create mode 
100644 node_modules/fsevents/node_modules/tar/lib/large-numbers.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/list.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/mkdir.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/mode-fix.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/pack.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/parse.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/pax.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/read-entry.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/replace.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/types.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/unpack.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/update.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/warn-mixin.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/winchars.js create mode 100644 node_modules/fsevents/node_modules/tar/lib/write-entry.js create mode 100644 node_modules/fsevents/node_modules/tar/package.json create mode 100644 node_modules/fsevents/node_modules/util-deprecate/History.md create mode 100644 node_modules/fsevents/node_modules/util-deprecate/LICENSE create mode 100644 node_modules/fsevents/node_modules/util-deprecate/README.md create mode 100644 node_modules/fsevents/node_modules/util-deprecate/browser.js create mode 100644 node_modules/fsevents/node_modules/util-deprecate/node.js create mode 100644 node_modules/fsevents/node_modules/util-deprecate/package.json create mode 100644 node_modules/fsevents/node_modules/wide-align/LICENSE create mode 100644 node_modules/fsevents/node_modules/wide-align/README.md create mode 100644 node_modules/fsevents/node_modules/wide-align/align.js create mode 100644 node_modules/fsevents/node_modules/wide-align/package.json create mode 100644 node_modules/fsevents/node_modules/wrappy/LICENSE create mode 100644 node_modules/fsevents/node_modules/wrappy/README.md create mode 100644 node_modules/fsevents/node_modules/wrappy/package.json create mode 100644 node_modules/fsevents/node_modules/wrappy/wrappy.js create mode 100644 node_modules/fsevents/node_modules/yallist/LICENSE create mode 100644 node_modules/fsevents/node_modules/yallist/README.md create mode 100644 node_modules/fsevents/node_modules/yallist/iterator.js create mode 100644 node_modules/fsevents/node_modules/yallist/package.json create mode 100644 node_modules/fsevents/node_modules/yallist/yallist.js create mode 100644 node_modules/fsevents/package.json create mode 100644 node_modules/fsevents/src/async.cc create mode 100644 node_modules/fsevents/src/constants.cc create mode 100644 node_modules/fsevents/src/methods.cc create mode 100644 node_modules/fsevents/src/storage.cc create mode 100644 node_modules/fsevents/src/thread.cc create mode 100644 node_modules/gauge/CHANGELOG.md create mode 100644 node_modules/gauge/LICENSE create mode 100644 node_modules/gauge/README.md create mode 100644 node_modules/gauge/base-theme.js create mode 100644 node_modules/gauge/error.js create mode 100644 node_modules/gauge/has-color.js create mode 100644 node_modules/gauge/index.js create mode 100644 node_modules/gauge/node_modules/is-fullwidth-code-point/index.js create mode 100644 node_modules/gauge/node_modules/is-fullwidth-code-point/license create mode 100644 node_modules/gauge/node_modules/is-fullwidth-code-point/package.json create mode 100644 
node_modules/gauge/node_modules/is-fullwidth-code-point/readme.md create mode 100644 node_modules/gauge/node_modules/string-width/index.js create mode 100644 node_modules/gauge/node_modules/string-width/license create mode 100644 node_modules/gauge/node_modules/string-width/package.json create mode 100644 node_modules/gauge/node_modules/string-width/readme.md create mode 100644 node_modules/gauge/package.json create mode 100644 node_modules/gauge/plumbing.js create mode 100644 node_modules/gauge/process.js create mode 100644 node_modules/gauge/progress-bar.js create mode 100644 node_modules/gauge/render-template.js create mode 100644 node_modules/gauge/set-immediate.js create mode 100644 node_modules/gauge/set-interval.js create mode 100644 node_modules/gauge/spin.js create mode 100644 node_modules/gauge/template-item.js create mode 100644 node_modules/gauge/theme-set.js create mode 100644 node_modules/gauge/themes.js create mode 100644 node_modules/gauge/wide-truncate.js create mode 100644 node_modules/get-stream/buffer-stream.js create mode 100644 node_modules/get-stream/index.js create mode 100644 node_modules/get-stream/license create mode 100644 node_modules/get-stream/package.json create mode 100644 node_modules/get-stream/readme.md create mode 100644 node_modules/get-value/LICENSE create mode 100644 node_modules/get-value/index.js create mode 100644 node_modules/get-value/package.json create mode 100644 node_modules/glob-parent/LICENSE create mode 100644 node_modules/glob-parent/README.md create mode 100644 node_modules/glob-parent/index.js create mode 100644 node_modules/glob-parent/node_modules/is-glob/LICENSE create mode 100644 node_modules/glob-parent/node_modules/is-glob/README.md create mode 100644 node_modules/glob-parent/node_modules/is-glob/index.js create mode 100644 node_modules/glob-parent/node_modules/is-glob/package.json create mode 100644 node_modules/glob-parent/package.json create mode 100644 node_modules/glob/LICENSE create mode 100644 node_modules/glob/README.md create mode 100644 node_modules/glob/changelog.md create mode 100644 node_modules/glob/common.js create mode 100644 node_modules/glob/glob.js create mode 100644 node_modules/glob/package.json create mode 100644 node_modules/glob/sync.js create mode 100644 node_modules/global-dirs/index.js create mode 100644 node_modules/global-dirs/license create mode 100644 node_modules/global-dirs/package.json create mode 100644 node_modules/global-dirs/readme.md create mode 100644 node_modules/got/index.js create mode 100644 node_modules/got/license create mode 100644 node_modules/got/node_modules/safe-buffer/LICENSE create mode 100644 node_modules/got/node_modules/safe-buffer/README.md create mode 100644 node_modules/got/node_modules/safe-buffer/index.d.ts create mode 100644 node_modules/got/node_modules/safe-buffer/index.js create mode 100644 node_modules/got/node_modules/safe-buffer/package.json create mode 100644 node_modules/got/package.json create mode 100644 node_modules/got/readme.md create mode 100644 node_modules/graceful-fs/LICENSE create mode 100644 node_modules/graceful-fs/README.md create mode 100644 node_modules/graceful-fs/clone.js create mode 100644 node_modules/graceful-fs/graceful-fs.js create mode 100644 node_modules/graceful-fs/legacy-streams.js create mode 100644 node_modules/graceful-fs/package.json create mode 100644 node_modules/graceful-fs/polyfills.js create mode 100644 node_modules/has-flag/index.js create mode 100644 node_modules/has-flag/license create mode 100644 
node_modules/has-flag/package.json create mode 100644 node_modules/has-flag/readme.md create mode 100644 node_modules/has-unicode/LICENSE create mode 100644 node_modules/has-unicode/README.md create mode 100644 node_modules/has-unicode/index.js create mode 100644 node_modules/has-unicode/package.json create mode 100644 node_modules/has-value/LICENSE create mode 100644 node_modules/has-value/README.md create mode 100644 node_modules/has-value/index.js create mode 100644 node_modules/has-value/package.json create mode 100644 node_modules/has-values/LICENSE create mode 100644 node_modules/has-values/README.md create mode 100644 node_modules/has-values/index.js create mode 100644 node_modules/has-values/node_modules/kind-of/LICENSE create mode 100644 node_modules/has-values/node_modules/kind-of/README.md create mode 100644 node_modules/has-values/node_modules/kind-of/index.js create mode 100644 node_modules/has-values/node_modules/kind-of/package.json create mode 100644 node_modules/has-values/package.json create mode 100644 node_modules/http-errors/HISTORY.md create mode 100644 node_modules/http-errors/LICENSE create mode 100644 node_modules/http-errors/README.md create mode 100644 node_modules/http-errors/index.js create mode 100644 node_modules/http-errors/node_modules/inherits/LICENSE create mode 100644 node_modules/http-errors/node_modules/inherits/README.md create mode 100644 node_modules/http-errors/node_modules/inherits/inherits.js create mode 100644 node_modules/http-errors/node_modules/inherits/inherits_browser.js create mode 100644 node_modules/http-errors/node_modules/inherits/package.json create mode 100644 node_modules/http-errors/package.json create mode 100644 node_modules/iconv-lite/Changelog.md create mode 100644 node_modules/iconv-lite/LICENSE create mode 100644 node_modules/iconv-lite/README.md create mode 100644 node_modules/iconv-lite/encodings/dbcs-codec.js create mode 100644 node_modules/iconv-lite/encodings/dbcs-data.js create mode 100644 node_modules/iconv-lite/encodings/index.js create mode 100644 node_modules/iconv-lite/encodings/internal.js create mode 100644 node_modules/iconv-lite/encodings/sbcs-codec.js create mode 100644 node_modules/iconv-lite/encodings/sbcs-data-generated.js create mode 100644 node_modules/iconv-lite/encodings/sbcs-data.js create mode 100644 node_modules/iconv-lite/encodings/tables/big5-added.json create mode 100644 node_modules/iconv-lite/encodings/tables/cp936.json create mode 100644 node_modules/iconv-lite/encodings/tables/cp949.json create mode 100644 node_modules/iconv-lite/encodings/tables/cp950.json create mode 100644 node_modules/iconv-lite/encodings/tables/eucjp.json create mode 100644 node_modules/iconv-lite/encodings/tables/gb18030-ranges.json create mode 100644 node_modules/iconv-lite/encodings/tables/gbk-added.json create mode 100644 node_modules/iconv-lite/encodings/tables/shiftjis.json create mode 100644 node_modules/iconv-lite/encodings/utf16.js create mode 100644 node_modules/iconv-lite/encodings/utf7.js create mode 100644 node_modules/iconv-lite/lib/bom-handling.js create mode 100644 node_modules/iconv-lite/lib/extend-node.js create mode 100644 node_modules/iconv-lite/lib/index.d.ts create mode 100644 node_modules/iconv-lite/lib/index.js create mode 100644 node_modules/iconv-lite/lib/streams.js create mode 100644 node_modules/iconv-lite/package.json create mode 100644 node_modules/ignore-by-default/LICENSE create mode 100644 node_modules/ignore-by-default/README.md create mode 100644 node_modules/ignore-by-default/index.js 
create mode 100644 node_modules/ignore-by-default/package.json create mode 100644 node_modules/ignore-walk/LICENSE create mode 100644 node_modules/ignore-walk/README.md create mode 100644 node_modules/ignore-walk/index.js create mode 100644 node_modules/ignore-walk/package.json create mode 100644 node_modules/import-lazy/index.js create mode 100644 node_modules/import-lazy/license create mode 100644 node_modules/import-lazy/package.json create mode 100644 node_modules/import-lazy/readme.md create mode 100644 node_modules/imurmurhash/README.md create mode 100644 node_modules/imurmurhash/imurmurhash.js create mode 100644 node_modules/imurmurhash/imurmurhash.min.js create mode 100644 node_modules/imurmurhash/package.json create mode 100644 node_modules/inflight/LICENSE create mode 100644 node_modules/inflight/README.md create mode 100644 node_modules/inflight/inflight.js create mode 100644 node_modules/inflight/package.json create mode 100644 node_modules/inherits/LICENSE create mode 100644 node_modules/inherits/README.md create mode 100644 node_modules/inherits/inherits.js create mode 100644 node_modules/inherits/inherits_browser.js create mode 100644 node_modules/inherits/package.json create mode 100644 node_modules/ini/LICENSE create mode 100644 node_modules/ini/README.md create mode 100644 node_modules/ini/ini.js create mode 100644 node_modules/ini/package.json create mode 100644 node_modules/ipaddr.js/LICENSE create mode 100644 node_modules/ipaddr.js/README.md create mode 100644 node_modules/ipaddr.js/ipaddr.min.js create mode 100644 node_modules/ipaddr.js/lib/ipaddr.js create mode 100644 node_modules/ipaddr.js/lib/ipaddr.js.d.ts create mode 100644 node_modules/ipaddr.js/package.json create mode 100644 node_modules/is-accessor-descriptor/LICENSE create mode 100644 node_modules/is-accessor-descriptor/README.md create mode 100644 node_modules/is-accessor-descriptor/index.js create mode 100644 node_modules/is-accessor-descriptor/node_modules/kind-of/CHANGELOG.md create mode 100644 node_modules/is-accessor-descriptor/node_modules/kind-of/LICENSE create mode 100644 node_modules/is-accessor-descriptor/node_modules/kind-of/README.md create mode 100644 node_modules/is-accessor-descriptor/node_modules/kind-of/index.js create mode 100644 node_modules/is-accessor-descriptor/node_modules/kind-of/package.json create mode 100644 node_modules/is-accessor-descriptor/package.json create mode 100644 node_modules/is-binary-path/index.js create mode 100644 node_modules/is-binary-path/license create mode 100644 node_modules/is-binary-path/package.json create mode 100644 node_modules/is-binary-path/readme.md create mode 100644 node_modules/is-buffer/LICENSE create mode 100644 node_modules/is-buffer/README.md create mode 100644 node_modules/is-buffer/index.js create mode 100644 node_modules/is-buffer/package.json create mode 100644 node_modules/is-buffer/test/basic.js create mode 100644 node_modules/is-ci/LICENSE create mode 100644 node_modules/is-ci/README.md create mode 100755 node_modules/is-ci/bin.js create mode 100644 node_modules/is-ci/index.js create mode 100644 node_modules/is-ci/package.json create mode 100644 node_modules/is-data-descriptor/LICENSE create mode 100644 node_modules/is-data-descriptor/README.md create mode 100644 node_modules/is-data-descriptor/index.js create mode 100644 node_modules/is-data-descriptor/node_modules/kind-of/CHANGELOG.md create mode 100644 node_modules/is-data-descriptor/node_modules/kind-of/LICENSE create mode 100644 
node_modules/is-data-descriptor/node_modules/kind-of/README.md create mode 100644 node_modules/is-data-descriptor/node_modules/kind-of/index.js create mode 100644 node_modules/is-data-descriptor/node_modules/kind-of/package.json create mode 100644 node_modules/is-data-descriptor/package.json create mode 100644 node_modules/is-descriptor/LICENSE create mode 100644 node_modules/is-descriptor/README.md create mode 100644 node_modules/is-descriptor/index.js create mode 100644 node_modules/is-descriptor/node_modules/kind-of/CHANGELOG.md create mode 100644 node_modules/is-descriptor/node_modules/kind-of/LICENSE create mode 100644 node_modules/is-descriptor/node_modules/kind-of/README.md create mode 100644 node_modules/is-descriptor/node_modules/kind-of/index.js create mode 100644 node_modules/is-descriptor/node_modules/kind-of/package.json create mode 100644 node_modules/is-descriptor/package.json create mode 100644 node_modules/is-extendable/LICENSE create mode 100644 node_modules/is-extendable/README.md create mode 100644 node_modules/is-extendable/index.js create mode 100644 node_modules/is-extendable/package.json create mode 100644 node_modules/is-extglob/LICENSE create mode 100644 node_modules/is-extglob/README.md create mode 100644 node_modules/is-extglob/index.js create mode 100644 node_modules/is-extglob/package.json create mode 100644 node_modules/is-fullwidth-code-point/index.js create mode 100644 node_modules/is-fullwidth-code-point/license create mode 100644 node_modules/is-fullwidth-code-point/package.json create mode 100644 node_modules/is-fullwidth-code-point/readme.md create mode 100644 node_modules/is-glob/LICENSE create mode 100644 node_modules/is-glob/README.md create mode 100644 node_modules/is-glob/index.js create mode 100644 node_modules/is-glob/package.json create mode 100644 node_modules/is-installed-globally/index.js create mode 100644 node_modules/is-installed-globally/license create mode 100644 node_modules/is-installed-globally/package.json create mode 100644 node_modules/is-installed-globally/readme.md create mode 100644 node_modules/is-npm/index.js create mode 100644 node_modules/is-npm/package.json create mode 100644 node_modules/is-npm/readme.md create mode 100644 node_modules/is-number/LICENSE create mode 100644 node_modules/is-number/README.md create mode 100644 node_modules/is-number/index.js create mode 100644 node_modules/is-number/package.json create mode 100644 node_modules/is-obj/index.js create mode 100644 node_modules/is-obj/license create mode 100644 node_modules/is-obj/package.json create mode 100644 node_modules/is-obj/readme.md create mode 100644 node_modules/is-path-inside/index.js create mode 100644 node_modules/is-path-inside/license create mode 100644 node_modules/is-path-inside/package.json create mode 100644 node_modules/is-path-inside/readme.md create mode 100644 node_modules/is-plain-object/LICENSE create mode 100644 node_modules/is-plain-object/README.md create mode 100644 node_modules/is-plain-object/index.d.ts create mode 100644 node_modules/is-plain-object/index.js create mode 100644 node_modules/is-plain-object/package.json create mode 100644 node_modules/is-redirect/index.js create mode 100644 node_modules/is-redirect/license create mode 100644 node_modules/is-redirect/package.json create mode 100644 node_modules/is-redirect/readme.md create mode 100644 node_modules/is-retry-allowed/index.js create mode 100644 node_modules/is-retry-allowed/license create mode 100644 node_modules/is-retry-allowed/package.json create mode 100644 
node_modules/is-retry-allowed/readme.md create mode 100644 node_modules/is-stream/index.js create mode 100644 node_modules/is-stream/license create mode 100644 node_modules/is-stream/package.json create mode 100644 node_modules/is-stream/readme.md create mode 100644 node_modules/is-windows/LICENSE create mode 100644 node_modules/is-windows/README.md create mode 100644 node_modules/is-windows/index.js create mode 100644 node_modules/is-windows/package.json create mode 100644 node_modules/isarray/.npmignore create mode 100644 node_modules/isarray/.travis.yml create mode 100644 node_modules/isarray/Makefile create mode 100644 node_modules/isarray/README.md create mode 100644 node_modules/isarray/component.json create mode 100644 node_modules/isarray/index.js create mode 100644 node_modules/isarray/package.json create mode 100644 node_modules/isarray/test.js create mode 100644 node_modules/isexe/.npmignore create mode 100644 node_modules/isexe/LICENSE create mode 100644 node_modules/isexe/README.md create mode 100644 node_modules/isexe/index.js create mode 100644 node_modules/isexe/mode.js create mode 100644 node_modules/isexe/package.json create mode 100644 node_modules/isexe/test/basic.js create mode 100644 node_modules/isexe/windows.js create mode 100644 node_modules/isobject/LICENSE create mode 100644 node_modules/isobject/README.md create mode 100644 node_modules/isobject/index.d.ts create mode 100644 node_modules/isobject/index.js create mode 100644 node_modules/isobject/package.json create mode 100644 node_modules/kind-of/LICENSE create mode 100644 node_modules/kind-of/README.md create mode 100644 node_modules/kind-of/index.js create mode 100644 node_modules/kind-of/package.json create mode 100644 node_modules/latest-version/index.js create mode 100644 node_modules/latest-version/license create mode 100644 node_modules/latest-version/package.json create mode 100644 node_modules/latest-version/readme.md create mode 100644 node_modules/lines-and-columns/LICENSE create mode 100644 node_modules/lines-and-columns/README.md create mode 100644 node_modules/lines-and-columns/dist/index.d.ts create mode 100644 node_modules/lines-and-columns/dist/index.js create mode 100644 node_modules/lines-and-columns/dist/index.mjs create mode 100644 node_modules/lines-and-columns/package.json create mode 100644 node_modules/lowercase-keys/index.js create mode 100644 node_modules/lowercase-keys/license create mode 100644 node_modules/lowercase-keys/package.json create mode 100644 node_modules/lowercase-keys/readme.md create mode 100644 node_modules/lru-cache/LICENSE create mode 100644 node_modules/lru-cache/README.md create mode 100644 node_modules/lru-cache/index.js create mode 100644 node_modules/lru-cache/node_modules/yallist/LICENSE create mode 100644 node_modules/lru-cache/node_modules/yallist/README.md create mode 100644 node_modules/lru-cache/node_modules/yallist/iterator.js create mode 100644 node_modules/lru-cache/node_modules/yallist/package.json create mode 100644 node_modules/lru-cache/node_modules/yallist/yallist.js create mode 100644 node_modules/lru-cache/package.json create mode 100644 node_modules/make-dir/index.js create mode 100644 node_modules/make-dir/license create mode 100644 node_modules/make-dir/package.json create mode 100644 node_modules/make-dir/readme.md create mode 100644 node_modules/map-cache/LICENSE create mode 100644 node_modules/map-cache/README.md create mode 100644 node_modules/map-cache/index.js create mode 100644 node_modules/map-cache/package.json create mode 100644 
node_modules/map-visit/LICENSE create mode 100644 node_modules/map-visit/README.md create mode 100644 node_modules/map-visit/index.js create mode 100644 node_modules/map-visit/package.json create mode 100644 node_modules/media-typer/HISTORY.md create mode 100644 node_modules/media-typer/LICENSE create mode 100644 node_modules/media-typer/README.md create mode 100644 node_modules/media-typer/index.js create mode 100644 node_modules/media-typer/package.json create mode 100644 node_modules/merge-descriptors/HISTORY.md create mode 100644 node_modules/merge-descriptors/LICENSE create mode 100644 node_modules/merge-descriptors/README.md create mode 100644 node_modules/merge-descriptors/index.js create mode 100644 node_modules/merge-descriptors/package.json create mode 100644 node_modules/methods/HISTORY.md create mode 100644 node_modules/methods/LICENSE create mode 100644 node_modules/methods/README.md create mode 100644 node_modules/methods/index.js create mode 100644 node_modules/methods/package.json create mode 100644 node_modules/micromatch/CHANGELOG.md create mode 100755 node_modules/micromatch/LICENSE create mode 100644 node_modules/micromatch/README.md create mode 100644 node_modules/micromatch/index.js create mode 100644 node_modules/micromatch/lib/cache.js create mode 100644 node_modules/micromatch/lib/compilers.js create mode 100644 node_modules/micromatch/lib/parsers.js create mode 100644 node_modules/micromatch/lib/utils.js create mode 100644 node_modules/micromatch/node_modules/define-property/CHANGELOG.md create mode 100644 node_modules/micromatch/node_modules/define-property/LICENSE create mode 100644 node_modules/micromatch/node_modules/define-property/README.md create mode 100644 node_modules/micromatch/node_modules/define-property/index.js create mode 100644 node_modules/micromatch/node_modules/define-property/package.json create mode 100644 node_modules/micromatch/node_modules/extend-shallow/LICENSE create mode 100644 node_modules/micromatch/node_modules/extend-shallow/README.md create mode 100644 node_modules/micromatch/node_modules/extend-shallow/index.js create mode 100644 node_modules/micromatch/node_modules/extend-shallow/package.json create mode 100644 node_modules/micromatch/node_modules/is-extendable/LICENSE create mode 100644 node_modules/micromatch/node_modules/is-extendable/README.md create mode 100644 node_modules/micromatch/node_modules/is-extendable/index.d.ts create mode 100644 node_modules/micromatch/node_modules/is-extendable/index.js create mode 100644 node_modules/micromatch/node_modules/is-extendable/package.json create mode 100644 node_modules/micromatch/node_modules/kind-of/CHANGELOG.md create mode 100644 node_modules/micromatch/node_modules/kind-of/LICENSE create mode 100644 node_modules/micromatch/node_modules/kind-of/README.md create mode 100644 node_modules/micromatch/node_modules/kind-of/index.js create mode 100644 node_modules/micromatch/node_modules/kind-of/package.json create mode 100644 node_modules/micromatch/package.json create mode 100644 node_modules/mime-db/HISTORY.md create mode 100644 node_modules/mime-db/LICENSE create mode 100644 node_modules/mime-db/README.md create mode 100644 node_modules/mime-db/db.json create mode 100644 node_modules/mime-db/index.js create mode 100644 node_modules/mime-db/package.json create mode 100644 node_modules/mime-types/HISTORY.md create mode 100644 node_modules/mime-types/LICENSE create mode 100644 node_modules/mime-types/README.md create mode 100644 node_modules/mime-types/index.js create mode 100644 
node_modules/mime-types/package.json create mode 100644 node_modules/mime/.npmignore create mode 100644 node_modules/mime/CHANGELOG.md create mode 100644 node_modules/mime/LICENSE create mode 100644 node_modules/mime/README.md create mode 100755 node_modules/mime/cli.js create mode 100644 node_modules/mime/mime.js create mode 100644 node_modules/mime/package.json create mode 100755 node_modules/mime/src/build.js create mode 100644 node_modules/mime/src/test.js create mode 100644 node_modules/mime/types.json create mode 100644 node_modules/minimatch/LICENSE create mode 100644 node_modules/minimatch/README.md create mode 100644 node_modules/minimatch/minimatch.js create mode 100644 node_modules/minimatch/package.json create mode 100644 node_modules/minimist/.travis.yml create mode 100644 node_modules/minimist/LICENSE create mode 100644 node_modules/minimist/example/parse.js create mode 100644 node_modules/minimist/index.js create mode 100644 node_modules/minimist/package.json create mode 100644 node_modules/minimist/readme.markdown create mode 100644 node_modules/minimist/test/dash.js create mode 100644 node_modules/minimist/test/default_bool.js create mode 100644 node_modules/minimist/test/dotted.js create mode 100644 node_modules/minimist/test/long.js create mode 100644 node_modules/minimist/test/parse.js create mode 100644 node_modules/minimist/test/parse_modified.js create mode 100644 node_modules/minimist/test/short.js create mode 100644 node_modules/minimist/test/whitespace.js create mode 100644 node_modules/minipass/LICENSE create mode 100644 node_modules/minipass/README.md create mode 100644 node_modules/minipass/index.js create mode 100644 node_modules/minipass/node_modules/safe-buffer/LICENSE create mode 100644 node_modules/minipass/node_modules/safe-buffer/README.md create mode 100644 node_modules/minipass/node_modules/safe-buffer/index.d.ts create mode 100644 node_modules/minipass/node_modules/safe-buffer/index.js create mode 100644 node_modules/minipass/node_modules/safe-buffer/package.json create mode 100644 node_modules/minipass/package.json create mode 100644 node_modules/minizlib/LICENSE create mode 100644 node_modules/minizlib/README.md create mode 100644 node_modules/minizlib/constants.js create mode 100644 node_modules/minizlib/index.js create mode 100644 node_modules/minizlib/package.json create mode 100644 node_modules/mixin-deep/LICENSE create mode 100644 node_modules/mixin-deep/README.md create mode 100644 node_modules/mixin-deep/index.js create mode 100644 node_modules/mixin-deep/node_modules/is-extendable/LICENSE create mode 100644 node_modules/mixin-deep/node_modules/is-extendable/README.md create mode 100644 node_modules/mixin-deep/node_modules/is-extendable/index.d.ts create mode 100644 node_modules/mixin-deep/node_modules/is-extendable/index.js create mode 100644 node_modules/mixin-deep/node_modules/is-extendable/package.json create mode 100644 node_modules/mixin-deep/package.json create mode 100644 node_modules/mkdirp/.travis.yml create mode 100644 node_modules/mkdirp/LICENSE create mode 100755 node_modules/mkdirp/bin/cmd.js create mode 100644 node_modules/mkdirp/bin/usage.txt create mode 100644 node_modules/mkdirp/examples/pow.js create mode 100644 node_modules/mkdirp/index.js create mode 100644 node_modules/mkdirp/package.json create mode 100644 node_modules/mkdirp/readme.markdown create mode 100644 node_modules/mkdirp/test/chmod.js create mode 100644 node_modules/mkdirp/test/clobber.js create mode 100644 node_modules/mkdirp/test/mkdirp.js create mode 100644 
node_modules/mkdirp/test/opts_fs.js create mode 100644 node_modules/mkdirp/test/opts_fs_sync.js create mode 100644 node_modules/mkdirp/test/perm.js create mode 100644 node_modules/mkdirp/test/perm_sync.js create mode 100644 node_modules/mkdirp/test/race.js create mode 100644 node_modules/mkdirp/test/rel.js create mode 100644 node_modules/mkdirp/test/return.js create mode 100644 node_modules/mkdirp/test/return_sync.js create mode 100644 node_modules/mkdirp/test/root.js create mode 100644 node_modules/mkdirp/test/sync.js create mode 100644 node_modules/mkdirp/test/umask.js create mode 100644 node_modules/mkdirp/test/umask_sync.js create mode 100644 node_modules/ms/index.js create mode 100644 node_modules/ms/license.md create mode 100644 node_modules/ms/package.json create mode 100644 node_modules/ms/readme.md create mode 100644 node_modules/mz/HISTORY.md create mode 100644 node_modules/mz/LICENSE create mode 100644 node_modules/mz/README.md create mode 100644 node_modules/mz/child_process.js create mode 100644 node_modules/mz/crypto.js create mode 100644 node_modules/mz/dns.js create mode 100644 node_modules/mz/fs.js create mode 100644 node_modules/mz/index.js create mode 100644 node_modules/mz/package.json create mode 100644 node_modules/mz/readline.js create mode 100644 node_modules/mz/zlib.js create mode 100644 node_modules/nan/CHANGELOG.md create mode 100644 node_modules/nan/LICENSE.md create mode 100644 node_modules/nan/README.md create mode 100644 node_modules/nan/doc/asyncworker.md create mode 100644 node_modules/nan/doc/buffers.md create mode 100644 node_modules/nan/doc/callback.md create mode 100644 node_modules/nan/doc/converters.md create mode 100644 node_modules/nan/doc/errors.md create mode 100644 node_modules/nan/doc/json.md create mode 100644 node_modules/nan/doc/maybe_types.md create mode 100644 node_modules/nan/doc/methods.md create mode 100644 node_modules/nan/doc/new.md create mode 100644 node_modules/nan/doc/node_misc.md create mode 100644 node_modules/nan/doc/object_wrappers.md create mode 100644 node_modules/nan/doc/persistent.md create mode 100644 node_modules/nan/doc/scopes.md create mode 100644 node_modules/nan/doc/script.md create mode 100644 node_modules/nan/doc/string_bytes.md create mode 100644 node_modules/nan/doc/v8_internals.md create mode 100644 node_modules/nan/doc/v8_misc.md create mode 100644 node_modules/nan/include_dirs.js create mode 100644 node_modules/nan/nan.h create mode 100644 node_modules/nan/nan_callbacks.h create mode 100644 node_modules/nan/nan_callbacks_12_inl.h create mode 100644 node_modules/nan/nan_callbacks_pre_12_inl.h create mode 100644 node_modules/nan/nan_converters.h create mode 100644 node_modules/nan/nan_converters_43_inl.h create mode 100644 node_modules/nan/nan_converters_pre_43_inl.h create mode 100644 node_modules/nan/nan_define_own_property_helper.h create mode 100644 node_modules/nan/nan_implementation_12_inl.h create mode 100644 node_modules/nan/nan_implementation_pre_12_inl.h create mode 100644 node_modules/nan/nan_json.h create mode 100644 node_modules/nan/nan_maybe_43_inl.h create mode 100644 node_modules/nan/nan_maybe_pre_43_inl.h create mode 100644 node_modules/nan/nan_new.h create mode 100644 node_modules/nan/nan_object_wrap.h create mode 100644 node_modules/nan/nan_persistent_12_inl.h create mode 100644 node_modules/nan/nan_persistent_pre_12_inl.h create mode 100644 node_modules/nan/nan_private.h create mode 100644 node_modules/nan/nan_string_bytes.h create mode 100644 node_modules/nan/nan_typedarray_contents.h create 
mode 100644 node_modules/nan/nan_weak.h create mode 100644 node_modules/nan/package.json create mode 100755 node_modules/nan/tools/1to2.js create mode 100644 node_modules/nan/tools/README.md create mode 100644 node_modules/nan/tools/package.json create mode 100644 node_modules/nanomatch/CHANGELOG.md create mode 100644 node_modules/nanomatch/LICENSE create mode 100644 node_modules/nanomatch/README.md create mode 100644 node_modules/nanomatch/index.js create mode 100644 node_modules/nanomatch/lib/cache.js create mode 100644 node_modules/nanomatch/lib/compilers.js create mode 100644 node_modules/nanomatch/lib/parsers.js create mode 100644 node_modules/nanomatch/lib/utils.js create mode 100644 node_modules/nanomatch/node_modules/define-property/CHANGELOG.md create mode 100644 node_modules/nanomatch/node_modules/define-property/LICENSE create mode 100644 node_modules/nanomatch/node_modules/define-property/README.md create mode 100644 node_modules/nanomatch/node_modules/define-property/index.js create mode 100644 node_modules/nanomatch/node_modules/define-property/package.json create mode 100644 node_modules/nanomatch/node_modules/extend-shallow/LICENSE create mode 100644 node_modules/nanomatch/node_modules/extend-shallow/README.md create mode 100644 node_modules/nanomatch/node_modules/extend-shallow/index.js create mode 100644 node_modules/nanomatch/node_modules/extend-shallow/package.json create mode 100644 node_modules/nanomatch/node_modules/is-extendable/LICENSE create mode 100644 node_modules/nanomatch/node_modules/is-extendable/README.md create mode 100644 node_modules/nanomatch/node_modules/is-extendable/index.d.ts create mode 100644 node_modules/nanomatch/node_modules/is-extendable/index.js create mode 100644 node_modules/nanomatch/node_modules/is-extendable/package.json create mode 100644 node_modules/nanomatch/node_modules/kind-of/CHANGELOG.md create mode 100644 node_modules/nanomatch/node_modules/kind-of/LICENSE create mode 100644 node_modules/nanomatch/node_modules/kind-of/README.md create mode 100644 node_modules/nanomatch/node_modules/kind-of/index.js create mode 100644 node_modules/nanomatch/node_modules/kind-of/package.json create mode 100644 node_modules/nanomatch/package.json create mode 100644 node_modules/needle/README.md create mode 100755 node_modules/needle/bin/needle create mode 100644 node_modules/needle/examples/deflated-stream.js create mode 100644 node_modules/needle/examples/digest-auth.js create mode 100644 node_modules/needle/examples/download-to-file.js create mode 100644 node_modules/needle/examples/multipart-stream.js create mode 100644 node_modules/needle/examples/parsed-stream.js create mode 100644 node_modules/needle/examples/parsed-stream2.js create mode 100644 node_modules/needle/examples/stream-events.js create mode 100644 node_modules/needle/examples/stream-to-file.js create mode 100644 node_modules/needle/examples/upload-image.js create mode 100644 node_modules/needle/lib/auth.js create mode 100644 node_modules/needle/lib/cookies.js create mode 100644 node_modules/needle/lib/decoder.js create mode 100644 node_modules/needle/lib/multipart.js create mode 100644 node_modules/needle/lib/needle.js create mode 100644 node_modules/needle/lib/parsers.js create mode 100644 node_modules/needle/lib/querystring.js create mode 100644 node_modules/needle/license.txt create mode 100644 node_modules/needle/node_modules/debug/CHANGELOG.md create mode 100644 node_modules/needle/node_modules/debug/LICENSE create mode 100644 node_modules/needle/node_modules/debug/README.md 
create mode 100644 node_modules/needle/node_modules/debug/dist/debug.js create mode 100644 node_modules/needle/node_modules/debug/node.js create mode 100644 node_modules/needle/node_modules/debug/package.json create mode 100644 node_modules/needle/node_modules/debug/src/browser.js create mode 100644 node_modules/needle/node_modules/debug/src/common.js create mode 100644 node_modules/needle/node_modules/debug/src/index.js create mode 100644 node_modules/needle/node_modules/debug/src/node.js create mode 100644 node_modules/needle/node_modules/ms/index.js create mode 100644 node_modules/needle/node_modules/ms/license.md create mode 100644 node_modules/needle/node_modules/ms/package.json create mode 100644 node_modules/needle/node_modules/ms/readme.md create mode 100644 node_modules/needle/package.json create mode 100644 node_modules/needle/test/basic_auth_spec.js create mode 100644 node_modules/needle/test/compression_spec.js create mode 100644 node_modules/needle/test/cookies_spec.js create mode 100644 node_modules/needle/test/decoder_spec.js create mode 100644 node_modules/needle/test/errors_spec.js create mode 100644 node_modules/needle/test/headers_spec.js create mode 100644 node_modules/needle/test/helpers.js create mode 100644 node_modules/needle/test/long_string_spec.js create mode 100644 node_modules/needle/test/output_spec.js create mode 100644 node_modules/needle/test/parsing_spec.js create mode 100644 node_modules/needle/test/post_data_spec.js create mode 100644 node_modules/needle/test/proxy_spec.js create mode 100644 node_modules/needle/test/querystring_spec.js create mode 100644 node_modules/needle/test/redirect_spec.js create mode 100644 node_modules/needle/test/redirect_with_timeout.js create mode 100644 node_modules/needle/test/request_stream_spec.js create mode 100644 node_modules/needle/test/response_stream_spec.js create mode 100644 node_modules/needle/test/socket_pool_spec.js create mode 100644 node_modules/needle/test/url_spec.js create mode 100644 node_modules/needle/test/utils/formidable.js create mode 100644 node_modules/needle/test/utils/proxy.js create mode 100644 node_modules/needle/test/utils/test.js create mode 100644 node_modules/negotiator/HISTORY.md create mode 100644 node_modules/negotiator/LICENSE create mode 100644 node_modules/negotiator/README.md create mode 100644 node_modules/negotiator/index.js create mode 100644 node_modules/negotiator/lib/charset.js create mode 100644 node_modules/negotiator/lib/encoding.js create mode 100644 node_modules/negotiator/lib/language.js create mode 100644 node_modules/negotiator/lib/mediaType.js create mode 100644 node_modules/negotiator/package.json create mode 100644 node_modules/node-modules-regexp/index.js create mode 100644 node_modules/node-modules-regexp/license create mode 100644 node_modules/node-modules-regexp/package.json create mode 100644 node_modules/node-modules-regexp/readme.md create mode 100644 node_modules/node-pre-gyp/CHANGELOG.md create mode 100644 node_modules/node-pre-gyp/LICENSE create mode 100644 node_modules/node-pre-gyp/README.md create mode 100644 node_modules/node-pre-gyp/appveyor.yml create mode 100755 node_modules/node-pre-gyp/bin/node-pre-gyp create mode 100644 node_modules/node-pre-gyp/bin/node-pre-gyp.cmd create mode 100644 node_modules/node-pre-gyp/contributing.md create mode 100644 node_modules/node-pre-gyp/lib/build.js create mode 100644 node_modules/node-pre-gyp/lib/clean.js create mode 100644 node_modules/node-pre-gyp/lib/configure.js create mode 100644 
node_modules/node-pre-gyp/lib/info.js create mode 100644 node_modules/node-pre-gyp/lib/install.js create mode 100644 node_modules/node-pre-gyp/lib/node-pre-gyp.js create mode 100644 node_modules/node-pre-gyp/lib/package.js create mode 100644 node_modules/node-pre-gyp/lib/pre-binding.js create mode 100644 node_modules/node-pre-gyp/lib/publish.js create mode 100644 node_modules/node-pre-gyp/lib/rebuild.js create mode 100644 node_modules/node-pre-gyp/lib/reinstall.js create mode 100644 node_modules/node-pre-gyp/lib/reveal.js create mode 100644 node_modules/node-pre-gyp/lib/testbinary.js create mode 100644 node_modules/node-pre-gyp/lib/testpackage.js create mode 100644 node_modules/node-pre-gyp/lib/unpublish.js create mode 100644 node_modules/node-pre-gyp/lib/util/abi_crosswalk.json create mode 100644 node_modules/node-pre-gyp/lib/util/compile.js create mode 100644 node_modules/node-pre-gyp/lib/util/handle_gyp_opts.js create mode 100644 node_modules/node-pre-gyp/lib/util/napi.js create mode 100644 node_modules/node-pre-gyp/lib/util/nw-pre-gyp/index.html create mode 100644 node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json create mode 100644 node_modules/node-pre-gyp/lib/util/s3_setup.js create mode 100644 node_modules/node-pre-gyp/lib/util/versioning.js create mode 120000 node_modules/node-pre-gyp/node_modules/.bin/detect-libc create mode 120000 node_modules/node-pre-gyp/node_modules/.bin/mkdirp create mode 120000 node_modules/node-pre-gyp/node_modules/.bin/needle create mode 120000 node_modules/node-pre-gyp/node_modules/.bin/nopt create mode 120000 node_modules/node-pre-gyp/node_modules/.bin/rc create mode 120000 node_modules/node-pre-gyp/node_modules/.bin/rimraf create mode 120000 node_modules/node-pre-gyp/node_modules/.bin/semver create mode 100644 node_modules/node-pre-gyp/node_modules/nopt/.npmignore create mode 100644 node_modules/node-pre-gyp/node_modules/nopt/.travis.yml create mode 100644 node_modules/node-pre-gyp/node_modules/nopt/CHANGELOG.md create mode 100644 node_modules/node-pre-gyp/node_modules/nopt/LICENSE create mode 100644 node_modules/node-pre-gyp/node_modules/nopt/README.md create mode 100755 node_modules/node-pre-gyp/node_modules/nopt/bin/nopt.js create mode 100755 node_modules/node-pre-gyp/node_modules/nopt/examples/my-program.js create mode 100644 node_modules/node-pre-gyp/node_modules/nopt/lib/nopt.js create mode 100644 node_modules/node-pre-gyp/node_modules/nopt/package.json create mode 100644 node_modules/node-pre-gyp/node_modules/nopt/test/basic.js create mode 100644 node_modules/node-pre-gyp/package.json create mode 100644 node_modules/nodemon/.jscsrc create mode 100644 node_modules/nodemon/.jshintrc create mode 100644 node_modules/nodemon/.travis.yml create mode 100644 node_modules/nodemon/README.md create mode 100755 node_modules/nodemon/bin/nodemon.js create mode 100755 node_modules/nodemon/bin/postinstall.js create mode 100644 node_modules/nodemon/commitlint.config.js create mode 100644 node_modules/nodemon/doc/cli/authors.txt create mode 100644 node_modules/nodemon/doc/cli/config.txt create mode 100644 node_modules/nodemon/doc/cli/help.txt create mode 100644 node_modules/nodemon/doc/cli/logo.txt create mode 100644 node_modules/nodemon/doc/cli/options.txt create mode 100644 node_modules/nodemon/doc/cli/topics.txt create mode 100644 node_modules/nodemon/doc/cli/usage.txt create mode 100644 node_modules/nodemon/doc/cli/whoami.txt create mode 100644 node_modules/nodemon/lib/cli/index.js create mode 100644 node_modules/nodemon/lib/cli/parse.js create mode 
100644 node_modules/nodemon/lib/config/command.js create mode 100644 node_modules/nodemon/lib/config/defaults.js create mode 100644 node_modules/nodemon/lib/config/exec.js create mode 100644 node_modules/nodemon/lib/config/index.js create mode 100644 node_modules/nodemon/lib/config/load.js create mode 100644 node_modules/nodemon/lib/help/index.js create mode 100644 node_modules/nodemon/lib/index.js create mode 100644 node_modules/nodemon/lib/monitor/index.js create mode 100644 node_modules/nodemon/lib/monitor/match.js create mode 100644 node_modules/nodemon/lib/monitor/run.js create mode 100644 node_modules/nodemon/lib/monitor/signals.js create mode 100644 node_modules/nodemon/lib/monitor/watch.js create mode 100644 node_modules/nodemon/lib/nodemon.js create mode 100644 node_modules/nodemon/lib/rules/add.js create mode 100644 node_modules/nodemon/lib/rules/index.js create mode 100644 node_modules/nodemon/lib/rules/parse.js create mode 100644 node_modules/nodemon/lib/spawn.js create mode 100644 node_modules/nodemon/lib/utils/bus.js create mode 100644 node_modules/nodemon/lib/utils/clone.js create mode 100644 node_modules/nodemon/lib/utils/colour.js create mode 100644 node_modules/nodemon/lib/utils/index.js create mode 100644 node_modules/nodemon/lib/utils/log.js create mode 100644 node_modules/nodemon/lib/utils/merge.js create mode 100644 node_modules/nodemon/lib/version.js create mode 120000 node_modules/nodemon/node_modules/.bin/nodetouch create mode 120000 node_modules/nodemon/node_modules/.bin/semver create mode 100644 node_modules/nodemon/node_modules/debug/CHANGELOG.md create mode 100644 node_modules/nodemon/node_modules/debug/LICENSE create mode 100644 node_modules/nodemon/node_modules/debug/README.md create mode 100644 node_modules/nodemon/node_modules/debug/dist/debug.js create mode 100644 node_modules/nodemon/node_modules/debug/node.js create mode 100644 node_modules/nodemon/node_modules/debug/package.json create mode 100644 node_modules/nodemon/node_modules/debug/src/browser.js create mode 100644 node_modules/nodemon/node_modules/debug/src/common.js create mode 100644 node_modules/nodemon/node_modules/debug/src/index.js create mode 100644 node_modules/nodemon/node_modules/debug/src/node.js create mode 100644 node_modules/nodemon/node_modules/ms/index.js create mode 100644 node_modules/nodemon/node_modules/ms/license.md create mode 100644 node_modules/nodemon/node_modules/ms/package.json create mode 100644 node_modules/nodemon/node_modules/ms/readme.md create mode 100644 node_modules/nodemon/package.json create mode 100644 node_modules/nopt/.gitignore create mode 100644 node_modules/nopt/LICENSE create mode 100644 node_modules/nopt/README.md create mode 100755 node_modules/nopt/bin/nopt.js create mode 100755 node_modules/nopt/examples/my-program.js create mode 100644 node_modules/nopt/lib/nopt.js create mode 100644 node_modules/nopt/package.json create mode 100644 node_modules/normalize-path/LICENSE create mode 100644 node_modules/normalize-path/README.md create mode 100644 node_modules/normalize-path/index.js create mode 100644 node_modules/normalize-path/package.json create mode 100644 node_modules/npm-bundled/LICENSE create mode 100644 node_modules/npm-bundled/README.md create mode 100644 node_modules/npm-bundled/index.js create mode 100644 node_modules/npm-bundled/package.json create mode 100644 node_modules/npm-packlist/LICENSE create mode 100644 node_modules/npm-packlist/README.md create mode 100644 node_modules/npm-packlist/index.js create mode 100644 
node_modules/npm-packlist/package.json create mode 100644 node_modules/npm-run-path/index.js create mode 100644 node_modules/npm-run-path/license create mode 100644 node_modules/npm-run-path/package.json create mode 100644 node_modules/npm-run-path/readme.md create mode 100644 node_modules/npmlog/CHANGELOG.md create mode 100644 node_modules/npmlog/LICENSE create mode 100644 node_modules/npmlog/README.md create mode 100644 node_modules/npmlog/log.js create mode 100644 node_modules/npmlog/package.json create mode 100644 node_modules/number-is-nan/index.js create mode 100644 node_modules/number-is-nan/license create mode 100644 node_modules/number-is-nan/package.json create mode 100644 node_modules/number-is-nan/readme.md create mode 100644 node_modules/object-assign/index.js create mode 100644 node_modules/object-assign/license create mode 100644 node_modules/object-assign/package.json create mode 100644 node_modules/object-assign/readme.md create mode 100644 node_modules/object-copy/LICENSE create mode 100644 node_modules/object-copy/index.js create mode 100644 node_modules/object-copy/package.json create mode 100644 node_modules/object-visit/LICENSE create mode 100644 node_modules/object-visit/README.md create mode 100644 node_modules/object-visit/index.js create mode 100644 node_modules/object-visit/package.json create mode 100644 node_modules/object.pick/LICENSE create mode 100644 node_modules/object.pick/README.md create mode 100644 node_modules/object.pick/index.js create mode 100644 node_modules/object.pick/package.json create mode 100644 node_modules/on-finished/HISTORY.md create mode 100644 node_modules/on-finished/LICENSE create mode 100644 node_modules/on-finished/README.md create mode 100644 node_modules/on-finished/index.js create mode 100644 node_modules/on-finished/package.json create mode 100644 node_modules/once/LICENSE create mode 100644 node_modules/once/README.md create mode 100644 node_modules/once/once.js create mode 100644 node_modules/once/package.json create mode 100644 node_modules/os-homedir/index.js create mode 100644 node_modules/os-homedir/license create mode 100644 node_modules/os-homedir/package.json create mode 100644 node_modules/os-homedir/readme.md create mode 100644 node_modules/os-tmpdir/index.js create mode 100644 node_modules/os-tmpdir/license create mode 100644 node_modules/os-tmpdir/package.json create mode 100644 node_modules/os-tmpdir/readme.md create mode 100644 node_modules/osenv/LICENSE create mode 100644 node_modules/osenv/README.md create mode 100644 node_modules/osenv/osenv.js create mode 100644 node_modules/osenv/package.json create mode 100644 node_modules/p-finally/index.js create mode 100644 node_modules/p-finally/license create mode 100644 node_modules/p-finally/package.json create mode 100644 node_modules/p-finally/readme.md create mode 100644 node_modules/package-json/index.js create mode 100644 node_modules/package-json/license create mode 120000 node_modules/package-json/node_modules/.bin/semver create mode 100644 node_modules/package-json/package.json create mode 100644 node_modules/package-json/readme.md create mode 100644 node_modules/parseurl/HISTORY.md create mode 100644 node_modules/parseurl/LICENSE create mode 100644 node_modules/parseurl/README.md create mode 100644 node_modules/parseurl/index.js create mode 100644 node_modules/parseurl/package.json create mode 100644 node_modules/pascalcase/LICENSE create mode 100644 node_modules/pascalcase/README.md create mode 100644 node_modules/pascalcase/index.js create mode 100644 
node_modules/pascalcase/package.json create mode 100644 node_modules/path-dirname/index.js create mode 100644 node_modules/path-dirname/license create mode 100644 node_modules/path-dirname/package.json create mode 100644 node_modules/path-dirname/readme.md create mode 100644 node_modules/path-is-absolute/index.js create mode 100644 node_modules/path-is-absolute/license create mode 100644 node_modules/path-is-absolute/package.json create mode 100644 node_modules/path-is-absolute/readme.md create mode 100644 node_modules/path-is-inside/LICENSE.txt create mode 100644 node_modules/path-is-inside/lib/path-is-inside.js create mode 100644 node_modules/path-is-inside/package.json create mode 100644 node_modules/path-key/index.js create mode 100644 node_modules/path-key/license create mode 100644 node_modules/path-key/package.json create mode 100644 node_modules/path-key/readme.md create mode 100644 node_modules/path-to-regexp/History.md create mode 100644 node_modules/path-to-regexp/LICENSE create mode 100644 node_modules/path-to-regexp/Readme.md create mode 100644 node_modules/path-to-regexp/index.js create mode 100644 node_modules/path-to-regexp/package.json create mode 100644 node_modules/pify/index.js create mode 100644 node_modules/pify/license create mode 100644 node_modules/pify/package.json create mode 100644 node_modules/pify/readme.md create mode 100644 node_modules/pirates/LICENSE create mode 100644 node_modules/pirates/README.md create mode 100644 node_modules/pirates/index.d.ts create mode 100644 node_modules/pirates/lib/index.js create mode 100644 node_modules/pirates/package.json create mode 100644 node_modules/posix-character-classes/LICENSE create mode 100644 node_modules/posix-character-classes/README.md create mode 100644 node_modules/posix-character-classes/index.js create mode 100644 node_modules/posix-character-classes/package.json create mode 100644 node_modules/prepend-http/index.js create mode 100644 node_modules/prepend-http/license create mode 100644 node_modules/prepend-http/package.json create mode 100644 node_modules/prepend-http/readme.md create mode 100644 node_modules/process-nextick-args/index.js create mode 100644 node_modules/process-nextick-args/license.md create mode 100644 node_modules/process-nextick-args/package.json create mode 100644 node_modules/process-nextick-args/readme.md create mode 100644 node_modules/proxy-addr/HISTORY.md create mode 100644 node_modules/proxy-addr/LICENSE create mode 100644 node_modules/proxy-addr/README.md create mode 100644 node_modules/proxy-addr/index.js create mode 100644 node_modules/proxy-addr/package.json create mode 100644 node_modules/pseudomap/LICENSE create mode 100644 node_modules/pseudomap/README.md create mode 100644 node_modules/pseudomap/map.js create mode 100644 node_modules/pseudomap/package.json create mode 100644 node_modules/pseudomap/pseudomap.js create mode 100644 node_modules/pseudomap/test/basic.js create mode 100644 node_modules/pstree.remy/.travis.yml create mode 100644 node_modules/pstree.remy/LICENSE create mode 100644 node_modules/pstree.remy/lib/index.js create mode 100644 node_modules/pstree.remy/lib/tree.js create mode 100644 node_modules/pstree.remy/lib/utils.js create mode 100644 node_modules/pstree.remy/package.json create mode 100644 node_modules/pstree.remy/tests/fixtures/index.js create mode 100644 node_modules/pstree.remy/tests/fixtures/out1 create mode 100644 node_modules/pstree.remy/tests/fixtures/out2 create mode 100644 node_modules/pstree.remy/tests/index.test.js create mode 100644 
node_modules/qs/.editorconfig create mode 100644 node_modules/qs/.eslintignore create mode 100644 node_modules/qs/.eslintrc create mode 100644 node_modules/qs/CHANGELOG.md create mode 100644 node_modules/qs/LICENSE create mode 100644 node_modules/qs/README.md create mode 100644 node_modules/qs/dist/qs.js create mode 100644 node_modules/qs/lib/formats.js create mode 100644 node_modules/qs/lib/index.js create mode 100644 node_modules/qs/lib/parse.js create mode 100644 node_modules/qs/lib/stringify.js create mode 100644 node_modules/qs/lib/utils.js create mode 100644 node_modules/qs/package.json create mode 100644 node_modules/qs/test/.eslintrc create mode 100644 node_modules/qs/test/index.js create mode 100644 node_modules/qs/test/parse.js create mode 100644 node_modules/qs/test/stringify.js create mode 100644 node_modules/qs/test/utils.js create mode 100644 node_modules/range-parser/HISTORY.md create mode 100644 node_modules/range-parser/LICENSE create mode 100644 node_modules/range-parser/README.md create mode 100644 node_modules/range-parser/index.js create mode 100644 node_modules/range-parser/package.json create mode 100644 node_modules/raw-body/HISTORY.md create mode 100644 node_modules/raw-body/LICENSE create mode 100644 node_modules/raw-body/README.md create mode 100644 node_modules/raw-body/index.d.ts create mode 100644 node_modules/raw-body/index.js create mode 100644 node_modules/raw-body/package.json create mode 100644 node_modules/rc/LICENSE.APACHE2 create mode 100644 node_modules/rc/LICENSE.BSD create mode 100644 node_modules/rc/LICENSE.MIT create mode 100644 node_modules/rc/README.md create mode 100644 node_modules/rc/browser.js create mode 100755 node_modules/rc/cli.js create mode 100755 node_modules/rc/index.js create mode 100644 node_modules/rc/lib/utils.js create mode 100644 node_modules/rc/node_modules/minimist/.travis.yml create mode 100644 node_modules/rc/node_modules/minimist/LICENSE create mode 100644 node_modules/rc/node_modules/minimist/example/parse.js create mode 100644 node_modules/rc/node_modules/minimist/index.js create mode 100644 node_modules/rc/node_modules/minimist/package.json create mode 100644 node_modules/rc/node_modules/minimist/readme.markdown create mode 100644 node_modules/rc/node_modules/minimist/test/all_bool.js create mode 100644 node_modules/rc/node_modules/minimist/test/bool.js create mode 100644 node_modules/rc/node_modules/minimist/test/dash.js create mode 100644 node_modules/rc/node_modules/minimist/test/default_bool.js create mode 100644 node_modules/rc/node_modules/minimist/test/dotted.js create mode 100644 node_modules/rc/node_modules/minimist/test/kv_short.js create mode 100644 node_modules/rc/node_modules/minimist/test/long.js create mode 100644 node_modules/rc/node_modules/minimist/test/num.js create mode 100644 node_modules/rc/node_modules/minimist/test/parse.js create mode 100644 node_modules/rc/node_modules/minimist/test/parse_modified.js create mode 100644 node_modules/rc/node_modules/minimist/test/short.js create mode 100644 node_modules/rc/node_modules/minimist/test/stop_early.js create mode 100644 node_modules/rc/node_modules/minimist/test/unknown.js create mode 100644 node_modules/rc/node_modules/minimist/test/whitespace.js create mode 100644 node_modules/rc/package.json create mode 100644 node_modules/rc/test/ini.js create mode 100644 node_modules/rc/test/nested-env-vars.js create mode 100644 node_modules/rc/test/test.js create mode 100644 node_modules/readable-stream/.travis.yml create mode 100644 
node_modules/readable-stream/CONTRIBUTING.md create mode 100644 node_modules/readable-stream/GOVERNANCE.md create mode 100644 node_modules/readable-stream/LICENSE create mode 100644 node_modules/readable-stream/README.md create mode 100644 node_modules/readable-stream/doc/wg-meetings/2015-01-30.md create mode 100644 node_modules/readable-stream/duplex-browser.js create mode 100644 node_modules/readable-stream/duplex.js create mode 100644 node_modules/readable-stream/lib/_stream_duplex.js create mode 100644 node_modules/readable-stream/lib/_stream_passthrough.js create mode 100644 node_modules/readable-stream/lib/_stream_readable.js create mode 100644 node_modules/readable-stream/lib/_stream_transform.js create mode 100644 node_modules/readable-stream/lib/_stream_writable.js create mode 100644 node_modules/readable-stream/lib/internal/streams/BufferList.js create mode 100644 node_modules/readable-stream/lib/internal/streams/destroy.js create mode 100644 node_modules/readable-stream/lib/internal/streams/stream-browser.js create mode 100644 node_modules/readable-stream/lib/internal/streams/stream.js create mode 100644 node_modules/readable-stream/package.json create mode 100644 node_modules/readable-stream/passthrough.js create mode 100644 node_modules/readable-stream/readable-browser.js create mode 100644 node_modules/readable-stream/readable.js create mode 100644 node_modules/readable-stream/transform.js create mode 100644 node_modules/readable-stream/writable-browser.js create mode 100644 node_modules/readable-stream/writable.js create mode 100644 node_modules/readdirp/LICENSE create mode 100644 node_modules/readdirp/README.md create mode 100644 node_modules/readdirp/package.json create mode 100644 node_modules/readdirp/readdirp.js create mode 100644 node_modules/readdirp/stream-api.js create mode 100644 node_modules/regex-not/LICENSE create mode 100644 node_modules/regex-not/README.md create mode 100644 node_modules/regex-not/index.js create mode 100644 node_modules/regex-not/node_modules/extend-shallow/LICENSE create mode 100644 node_modules/regex-not/node_modules/extend-shallow/README.md create mode 100644 node_modules/regex-not/node_modules/extend-shallow/index.js create mode 100644 node_modules/regex-not/node_modules/extend-shallow/package.json create mode 100644 node_modules/regex-not/node_modules/is-extendable/LICENSE create mode 100644 node_modules/regex-not/node_modules/is-extendable/README.md create mode 100644 node_modules/regex-not/node_modules/is-extendable/index.d.ts create mode 100644 node_modules/regex-not/node_modules/is-extendable/index.js create mode 100644 node_modules/regex-not/node_modules/is-extendable/package.json create mode 100644 node_modules/regex-not/package.json create mode 100644 node_modules/registry-auth-token/.npmignore create mode 100644 node_modules/registry-auth-token/CHANGELOG.md create mode 100644 node_modules/registry-auth-token/LICENSE create mode 100644 node_modules/registry-auth-token/README.md create mode 100644 node_modules/registry-auth-token/base64.js create mode 100644 node_modules/registry-auth-token/index.js create mode 120000 node_modules/registry-auth-token/node_modules/.bin/rc create mode 100644 node_modules/registry-auth-token/node_modules/safe-buffer/LICENSE create mode 100644 node_modules/registry-auth-token/node_modules/safe-buffer/README.md create mode 100644 node_modules/registry-auth-token/node_modules/safe-buffer/index.d.ts create mode 100644 node_modules/registry-auth-token/node_modules/safe-buffer/index.js create mode 100644 
node_modules/registry-auth-token/node_modules/safe-buffer/package.json create mode 100644 node_modules/registry-auth-token/package.json create mode 100644 node_modules/registry-auth-token/registry-url.js create mode 100644 node_modules/registry-auth-token/test/auth-token.test.js create mode 100644 node_modules/registry-auth-token/test/registry-url.test.js create mode 100644 node_modules/registry-auth-token/yarn.lock create mode 100644 node_modules/registry-url/index.js create mode 100644 node_modules/registry-url/license create mode 120000 node_modules/registry-url/node_modules/.bin/rc create mode 100644 node_modules/registry-url/package.json create mode 100644 node_modules/registry-url/readme.md create mode 100644 node_modules/remove-trailing-separator/history.md create mode 100644 node_modules/remove-trailing-separator/index.js create mode 100644 node_modules/remove-trailing-separator/license create mode 100644 node_modules/remove-trailing-separator/package.json create mode 100644 node_modules/remove-trailing-separator/readme.md create mode 100644 node_modules/repeat-element/LICENSE create mode 100644 node_modules/repeat-element/README.md create mode 100644 node_modules/repeat-element/index.js create mode 100644 node_modules/repeat-element/package.json create mode 100644 node_modules/repeat-string/LICENSE create mode 100644 node_modules/repeat-string/README.md create mode 100644 node_modules/repeat-string/index.js create mode 100644 node_modules/repeat-string/package.json create mode 100644 node_modules/resolve-url/.jshintrc create mode 100644 node_modules/resolve-url/LICENSE create mode 100644 node_modules/resolve-url/bower.json create mode 100644 node_modules/resolve-url/changelog.md create mode 100644 node_modules/resolve-url/component.json create mode 100644 node_modules/resolve-url/package.json create mode 100644 node_modules/resolve-url/readme.md create mode 100644 node_modules/resolve-url/resolve-url.js create mode 100644 node_modules/resolve-url/test/resolve-url.js create mode 100644 node_modules/ret/LICENSE create mode 100644 node_modules/ret/README.md create mode 100644 node_modules/ret/lib/index.js create mode 100644 node_modules/ret/lib/positions.js create mode 100644 node_modules/ret/lib/sets.js create mode 100644 node_modules/ret/lib/types.js create mode 100644 node_modules/ret/lib/util.js create mode 100644 node_modules/ret/package.json create mode 100644 node_modules/rimraf/LICENSE create mode 100644 node_modules/rimraf/README.md create mode 100755 node_modules/rimraf/bin.js create mode 100644 node_modules/rimraf/package.json create mode 100644 node_modules/rimraf/rimraf.js create mode 100644 node_modules/safe-buffer/LICENSE create mode 100644 node_modules/safe-buffer/README.md create mode 100644 node_modules/safe-buffer/index.d.ts create mode 100644 node_modules/safe-buffer/index.js create mode 100644 node_modules/safe-buffer/package.json create mode 100644 node_modules/safe-regex/.travis.yml create mode 100644 node_modules/safe-regex/LICENSE create mode 100644 node_modules/safe-regex/example/safe.js create mode 100644 node_modules/safe-regex/index.js create mode 100644 node_modules/safe-regex/package.json create mode 100644 node_modules/safe-regex/readme.markdown create mode 100644 node_modules/safe-regex/test/regex.js create mode 100644 node_modules/safer-buffer/LICENSE create mode 100644 node_modules/safer-buffer/Porting-Buffer.md create mode 100644 node_modules/safer-buffer/Readme.md create mode 100644 node_modules/safer-buffer/dangerous.js create mode 100644 
node_modules/safer-buffer/package.json create mode 100644 node_modules/safer-buffer/safer.js create mode 100644 node_modules/safer-buffer/tests.js create mode 100644 node_modules/sax/LICENSE create mode 100644 node_modules/sax/README.md create mode 100644 node_modules/sax/lib/sax.js create mode 100644 node_modules/sax/package.json create mode 100644 node_modules/semver-diff/index.js create mode 100644 node_modules/semver-diff/license create mode 120000 node_modules/semver-diff/node_modules/.bin/semver create mode 100644 node_modules/semver-diff/package.json create mode 100644 node_modules/semver-diff/readme.md create mode 100644 node_modules/semver/CHANGELOG.md create mode 100644 node_modules/semver/LICENSE create mode 100644 node_modules/semver/README.md create mode 100755 node_modules/semver/bin/semver create mode 100644 node_modules/semver/package.json create mode 100644 node_modules/semver/range.bnf create mode 100644 node_modules/semver/semver.js create mode 100644 node_modules/send/HISTORY.md create mode 100644 node_modules/send/LICENSE create mode 100644 node_modules/send/README.md create mode 100644 node_modules/send/index.js create mode 120000 node_modules/send/node_modules/.bin/mime create mode 100644 node_modules/send/node_modules/http-errors/HISTORY.md create mode 100644 node_modules/send/node_modules/http-errors/LICENSE create mode 100644 node_modules/send/node_modules/http-errors/README.md create mode 100644 node_modules/send/node_modules/http-errors/index.js create mode 100644 node_modules/send/node_modules/http-errors/package.json create mode 100644 node_modules/send/node_modules/ms/index.js create mode 100644 node_modules/send/node_modules/ms/license.md create mode 100644 node_modules/send/node_modules/ms/package.json create mode 100644 node_modules/send/node_modules/ms/readme.md create mode 100644 node_modules/send/package.json create mode 100644 node_modules/serve-static/HISTORY.md create mode 100644 node_modules/serve-static/LICENSE create mode 100644 node_modules/serve-static/README.md create mode 100644 node_modules/serve-static/index.js create mode 100644 node_modules/serve-static/package.json create mode 100644 node_modules/set-blocking/CHANGELOG.md create mode 100644 node_modules/set-blocking/LICENSE.txt create mode 100644 node_modules/set-blocking/README.md create mode 100644 node_modules/set-blocking/index.js create mode 100644 node_modules/set-blocking/package.json create mode 100644 node_modules/set-value/LICENSE create mode 100644 node_modules/set-value/README.md create mode 100644 node_modules/set-value/index.js create mode 100644 node_modules/set-value/package.json create mode 100644 node_modules/setprototypeof/LICENSE create mode 100644 node_modules/setprototypeof/README.md create mode 100644 node_modules/setprototypeof/index.d.ts create mode 100644 node_modules/setprototypeof/index.js create mode 100644 node_modules/setprototypeof/package.json create mode 100644 node_modules/setprototypeof/test/index.js create mode 100644 node_modules/shebang-command/index.js create mode 100644 node_modules/shebang-command/license create mode 100644 node_modules/shebang-command/package.json create mode 100644 node_modules/shebang-command/readme.md create mode 100644 node_modules/shebang-regex/index.js create mode 100644 node_modules/shebang-regex/license create mode 100644 node_modules/shebang-regex/package.json create mode 100644 node_modules/shebang-regex/readme.md create mode 100644 node_modules/signal-exit/CHANGELOG.md create mode 100644 
node_modules/signal-exit/LICENSE.txt create mode 100644 node_modules/signal-exit/README.md create mode 100644 node_modules/signal-exit/index.js create mode 100644 node_modules/signal-exit/package.json create mode 100644 node_modules/signal-exit/signals.js create mode 100644 node_modules/snapdragon-node/LICENSE create mode 100644 node_modules/snapdragon-node/README.md create mode 100644 node_modules/snapdragon-node/index.js create mode 100644 node_modules/snapdragon-node/node_modules/define-property/LICENSE create mode 100644 node_modules/snapdragon-node/node_modules/define-property/README.md create mode 100644 node_modules/snapdragon-node/node_modules/define-property/index.js create mode 100644 node_modules/snapdragon-node/node_modules/define-property/package.json create mode 100644 node_modules/snapdragon-node/package.json create mode 100644 node_modules/snapdragon-util/LICENSE create mode 100644 node_modules/snapdragon-util/README.md create mode 100644 node_modules/snapdragon-util/index.js create mode 100644 node_modules/snapdragon-util/package.json create mode 100644 node_modules/snapdragon/LICENSE create mode 100644 node_modules/snapdragon/README.md create mode 100644 node_modules/snapdragon/index.js create mode 100644 node_modules/snapdragon/lib/compiler.js create mode 100644 node_modules/snapdragon/lib/parser.js create mode 100644 node_modules/snapdragon/lib/position.js create mode 100644 node_modules/snapdragon/lib/source-maps.js create mode 100644 node_modules/snapdragon/lib/utils.js create mode 100644 node_modules/snapdragon/package.json create mode 100644 node_modules/source-map-resolve/.jshintrc create mode 100644 node_modules/source-map-resolve/.travis.yml create mode 100644 node_modules/source-map-resolve/LICENSE create mode 100644 node_modules/source-map-resolve/bower.json create mode 100644 node_modules/source-map-resolve/changelog.md create mode 100644 node_modules/source-map-resolve/component.json create mode 100644 node_modules/source-map-resolve/generate-source-map-resolve.js create mode 100644 node_modules/source-map-resolve/lib/decode-uri-component.js create mode 100644 node_modules/source-map-resolve/lib/resolve-url.js create mode 100644 node_modules/source-map-resolve/lib/source-map-resolve-node.js create mode 120000 node_modules/source-map-resolve/node_modules/.bin/atob create mode 100644 node_modules/source-map-resolve/package.json create mode 100644 node_modules/source-map-resolve/readme.md create mode 100644 node_modules/source-map-resolve/source-map-resolve.js create mode 100644 node_modules/source-map-resolve/source-map-resolve.js.template create mode 100644 node_modules/source-map-resolve/test/common.js create mode 100644 node_modules/source-map-resolve/test/read.js create mode 100644 node_modules/source-map-resolve/test/source-map-resolve.js create mode 100644 node_modules/source-map-resolve/test/windows.js create mode 100644 node_modules/source-map-resolve/x-package.json5 create mode 100644 node_modules/source-map-url/.jshintrc create mode 100644 node_modules/source-map-url/LICENSE create mode 100644 node_modules/source-map-url/bower.json create mode 100644 node_modules/source-map-url/changelog.md create mode 100644 node_modules/source-map-url/component.json create mode 100644 node_modules/source-map-url/package.json create mode 100644 node_modules/source-map-url/readme.md create mode 100644 node_modules/source-map-url/source-map-url.js create mode 100644 node_modules/source-map-url/test/source-map-url.js create mode 100644 
node_modules/source-map-url/x-package.json5 create mode 100644 node_modules/source-map/CHANGELOG.md create mode 100644 node_modules/source-map/LICENSE create mode 100644 node_modules/source-map/README.md create mode 100644 node_modules/source-map/dist/source-map.debug.js create mode 100644 node_modules/source-map/dist/source-map.js create mode 100644 node_modules/source-map/dist/source-map.min.js create mode 100644 node_modules/source-map/dist/source-map.min.js.map create mode 100644 node_modules/source-map/lib/array-set.js create mode 100644 node_modules/source-map/lib/base64-vlq.js create mode 100644 node_modules/source-map/lib/base64.js create mode 100644 node_modules/source-map/lib/binary-search.js create mode 100644 node_modules/source-map/lib/mapping-list.js create mode 100644 node_modules/source-map/lib/quick-sort.js create mode 100644 node_modules/source-map/lib/source-map-consumer.js create mode 100644 node_modules/source-map/lib/source-map-generator.js create mode 100644 node_modules/source-map/lib/source-node.js create mode 100644 node_modules/source-map/lib/util.js create mode 100644 node_modules/source-map/package.json create mode 100644 node_modules/source-map/source-map.js create mode 100644 node_modules/split-string/LICENSE create mode 100644 node_modules/split-string/README.md create mode 100644 node_modules/split-string/index.js create mode 100644 node_modules/split-string/node_modules/extend-shallow/LICENSE create mode 100644 node_modules/split-string/node_modules/extend-shallow/README.md create mode 100644 node_modules/split-string/node_modules/extend-shallow/index.js create mode 100644 node_modules/split-string/node_modules/extend-shallow/package.json create mode 100644 node_modules/split-string/node_modules/is-extendable/LICENSE create mode 100644 node_modules/split-string/node_modules/is-extendable/README.md create mode 100644 node_modules/split-string/node_modules/is-extendable/index.d.ts create mode 100644 node_modules/split-string/node_modules/is-extendable/index.js create mode 100644 node_modules/split-string/node_modules/is-extendable/package.json create mode 100644 node_modules/split-string/package.json create mode 100644 node_modules/static-extend/LICENSE create mode 100644 node_modules/static-extend/index.js create mode 100644 node_modules/static-extend/package.json create mode 100644 node_modules/statuses/HISTORY.md create mode 100644 node_modules/statuses/LICENSE create mode 100644 node_modules/statuses/README.md create mode 100644 node_modules/statuses/codes.json create mode 100644 node_modules/statuses/index.js create mode 100644 node_modules/statuses/package.json create mode 100644 node_modules/string-width/index.js create mode 100644 node_modules/string-width/license create mode 100644 node_modules/string-width/node_modules/ansi-regex/index.js create mode 100644 node_modules/string-width/node_modules/ansi-regex/license create mode 100644 node_modules/string-width/node_modules/ansi-regex/package.json create mode 100644 node_modules/string-width/node_modules/ansi-regex/readme.md create mode 100644 node_modules/string-width/node_modules/strip-ansi/index.js create mode 100644 node_modules/string-width/node_modules/strip-ansi/license create mode 100644 node_modules/string-width/node_modules/strip-ansi/package.json create mode 100644 node_modules/string-width/node_modules/strip-ansi/readme.md create mode 100644 node_modules/string-width/package.json create mode 100644 node_modules/string-width/readme.md create mode 100644 
node_modules/string_decoder/.travis.yml create mode 100644 node_modules/string_decoder/LICENSE create mode 100644 node_modules/string_decoder/README.md create mode 100644 node_modules/string_decoder/lib/string_decoder.js create mode 100644 node_modules/string_decoder/package.json create mode 100644 node_modules/strip-ansi/index.js create mode 100644 node_modules/strip-ansi/license create mode 100644 node_modules/strip-ansi/package.json create mode 100644 node_modules/strip-ansi/readme.md create mode 100644 node_modules/strip-eof/index.js create mode 100644 node_modules/strip-eof/license create mode 100644 node_modules/strip-eof/package.json create mode 100644 node_modules/strip-eof/readme.md create mode 100644 node_modules/strip-json-comments/index.js create mode 100644 node_modules/strip-json-comments/license create mode 100644 node_modules/strip-json-comments/package.json create mode 100644 node_modules/strip-json-comments/readme.md create mode 100644 node_modules/sucrase/CHANGELOG.md create mode 100644 node_modules/sucrase/LICENSE create mode 100644 node_modules/sucrase/README.md create mode 100755 node_modules/sucrase/bin/sucrase create mode 100755 node_modules/sucrase/bin/sucrase-node create mode 100644 node_modules/sucrase/dist/CJSImportProcessor.d.ts create mode 100644 node_modules/sucrase/dist/CJSImportProcessor.js create mode 100644 node_modules/sucrase/dist/CJSImportProcessor.mjs create mode 100644 node_modules/sucrase/dist/HelperManager.d.ts create mode 100644 node_modules/sucrase/dist/HelperManager.js create mode 100644 node_modules/sucrase/dist/HelperManager.mjs create mode 100644 node_modules/sucrase/dist/NameManager.d.ts create mode 100644 node_modules/sucrase/dist/NameManager.js create mode 100644 node_modules/sucrase/dist/NameManager.mjs create mode 100644 node_modules/sucrase/dist/TokenProcessor.d.ts create mode 100644 node_modules/sucrase/dist/TokenProcessor.js create mode 100644 node_modules/sucrase/dist/TokenProcessor.mjs create mode 100644 node_modules/sucrase/dist/cli.d.ts create mode 100644 node_modules/sucrase/dist/cli.js create mode 100644 node_modules/sucrase/dist/cli.mjs create mode 100644 node_modules/sucrase/dist/computeSourceMap.d.ts create mode 100644 node_modules/sucrase/dist/computeSourceMap.js create mode 100644 node_modules/sucrase/dist/computeSourceMap.mjs create mode 100644 node_modules/sucrase/dist/identifyShadowedGlobals.d.ts create mode 100644 node_modules/sucrase/dist/identifyShadowedGlobals.js create mode 100644 node_modules/sucrase/dist/identifyShadowedGlobals.mjs create mode 100644 node_modules/sucrase/dist/index.d.ts create mode 100644 node_modules/sucrase/dist/index.js create mode 100644 node_modules/sucrase/dist/index.mjs create mode 100644 node_modules/sucrase/dist/parser/index.d.ts create mode 100644 node_modules/sucrase/dist/parser/index.js create mode 100644 node_modules/sucrase/dist/parser/index.mjs create mode 100644 node_modules/sucrase/dist/parser/plugins/flow.d.ts create mode 100644 node_modules/sucrase/dist/parser/plugins/flow.js create mode 100644 node_modules/sucrase/dist/parser/plugins/flow.mjs create mode 100644 node_modules/sucrase/dist/parser/plugins/jsx/index.d.ts create mode 100644 node_modules/sucrase/dist/parser/plugins/jsx/index.js create mode 100644 node_modules/sucrase/dist/parser/plugins/jsx/index.mjs create mode 100644 node_modules/sucrase/dist/parser/plugins/jsx/xhtml.d.ts create mode 100644 node_modules/sucrase/dist/parser/plugins/jsx/xhtml.js create mode 100644 node_modules/sucrase/dist/parser/plugins/jsx/xhtml.mjs 
create mode 100644 node_modules/sucrase/dist/parser/plugins/types.d.ts create mode 100644 node_modules/sucrase/dist/parser/plugins/types.js create mode 100644 node_modules/sucrase/dist/parser/plugins/types.mjs create mode 100644 node_modules/sucrase/dist/parser/plugins/typescript.d.ts create mode 100644 node_modules/sucrase/dist/parser/plugins/typescript.js create mode 100644 node_modules/sucrase/dist/parser/plugins/typescript.mjs create mode 100644 node_modules/sucrase/dist/parser/tokenizer/index.d.ts create mode 100644 node_modules/sucrase/dist/parser/tokenizer/index.js create mode 100644 node_modules/sucrase/dist/parser/tokenizer/index.mjs create mode 100644 node_modules/sucrase/dist/parser/tokenizer/keywords.d.ts create mode 100644 node_modules/sucrase/dist/parser/tokenizer/keywords.js create mode 100644 node_modules/sucrase/dist/parser/tokenizer/keywords.mjs create mode 100644 node_modules/sucrase/dist/parser/tokenizer/readWord.d.ts create mode 100644 node_modules/sucrase/dist/parser/tokenizer/readWord.js create mode 100644 node_modules/sucrase/dist/parser/tokenizer/readWord.mjs create mode 100644 node_modules/sucrase/dist/parser/tokenizer/readWordTree.d.ts create mode 100644 node_modules/sucrase/dist/parser/tokenizer/readWordTree.js create mode 100644 node_modules/sucrase/dist/parser/tokenizer/readWordTree.mjs create mode 100644 node_modules/sucrase/dist/parser/tokenizer/state.d.ts create mode 100644 node_modules/sucrase/dist/parser/tokenizer/state.js create mode 100644 node_modules/sucrase/dist/parser/tokenizer/state.mjs create mode 100644 node_modules/sucrase/dist/parser/tokenizer/types.d.ts create mode 100644 node_modules/sucrase/dist/parser/tokenizer/types.js create mode 100644 node_modules/sucrase/dist/parser/tokenizer/types.mjs create mode 100644 node_modules/sucrase/dist/parser/traverser/base.d.ts create mode 100644 node_modules/sucrase/dist/parser/traverser/base.js create mode 100644 node_modules/sucrase/dist/parser/traverser/base.mjs create mode 100644 node_modules/sucrase/dist/parser/traverser/expression.d.ts create mode 100644 node_modules/sucrase/dist/parser/traverser/expression.js create mode 100644 node_modules/sucrase/dist/parser/traverser/expression.mjs create mode 100644 node_modules/sucrase/dist/parser/traverser/index.d.ts create mode 100644 node_modules/sucrase/dist/parser/traverser/index.js create mode 100644 node_modules/sucrase/dist/parser/traverser/index.mjs create mode 100644 node_modules/sucrase/dist/parser/traverser/lval.d.ts create mode 100644 node_modules/sucrase/dist/parser/traverser/lval.js create mode 100644 node_modules/sucrase/dist/parser/traverser/lval.mjs create mode 100644 node_modules/sucrase/dist/parser/traverser/statement.d.ts create mode 100644 node_modules/sucrase/dist/parser/traverser/statement.js create mode 100644 node_modules/sucrase/dist/parser/traverser/statement.mjs create mode 100644 node_modules/sucrase/dist/parser/traverser/util.d.ts create mode 100644 node_modules/sucrase/dist/parser/traverser/util.js create mode 100644 node_modules/sucrase/dist/parser/traverser/util.mjs create mode 100644 node_modules/sucrase/dist/parser/util/charcodes.d.ts create mode 100644 node_modules/sucrase/dist/parser/util/charcodes.js create mode 100644 node_modules/sucrase/dist/parser/util/charcodes.mjs create mode 100644 node_modules/sucrase/dist/parser/util/identifier.d.ts create mode 100644 node_modules/sucrase/dist/parser/util/identifier.js create mode 100644 node_modules/sucrase/dist/parser/util/identifier.mjs create mode 100644 
node_modules/sucrase/dist/parser/util/whitespace.d.ts create mode 100644 node_modules/sucrase/dist/parser/util/whitespace.js create mode 100644 node_modules/sucrase/dist/parser/util/whitespace.mjs create mode 100644 node_modules/sucrase/dist/register.d.ts create mode 100644 node_modules/sucrase/dist/register.js create mode 100644 node_modules/sucrase/dist/register.mjs create mode 100644 node_modules/sucrase/dist/transformers/CJSImportTransformer.d.ts create mode 100644 node_modules/sucrase/dist/transformers/CJSImportTransformer.js create mode 100644 node_modules/sucrase/dist/transformers/CJSImportTransformer.mjs create mode 100644 node_modules/sucrase/dist/transformers/ESMImportTransformer.d.ts create mode 100644 node_modules/sucrase/dist/transformers/ESMImportTransformer.js create mode 100644 node_modules/sucrase/dist/transformers/ESMImportTransformer.mjs create mode 100644 node_modules/sucrase/dist/transformers/FlowTransformer.d.ts create mode 100644 node_modules/sucrase/dist/transformers/FlowTransformer.js create mode 100644 node_modules/sucrase/dist/transformers/FlowTransformer.mjs create mode 100644 node_modules/sucrase/dist/transformers/JSXTransformer.d.ts create mode 100644 node_modules/sucrase/dist/transformers/JSXTransformer.js create mode 100644 node_modules/sucrase/dist/transformers/JSXTransformer.mjs create mode 100644 node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.d.ts create mode 100644 node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.js create mode 100644 node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.mjs create mode 100644 node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.d.ts create mode 100644 node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.js create mode 100644 node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.mjs create mode 100644 node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.d.ts create mode 100644 node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.js create mode 100644 node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.mjs create mode 100644 node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.d.ts create mode 100644 node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.js create mode 100644 node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.mjs create mode 100644 node_modules/sucrase/dist/transformers/RootTransformer.d.ts create mode 100644 node_modules/sucrase/dist/transformers/RootTransformer.js create mode 100644 node_modules/sucrase/dist/transformers/RootTransformer.mjs create mode 100644 node_modules/sucrase/dist/transformers/Transformer.d.ts create mode 100644 node_modules/sucrase/dist/transformers/Transformer.js create mode 100644 node_modules/sucrase/dist/transformers/Transformer.mjs create mode 100644 node_modules/sucrase/dist/transformers/TypeScriptTransformer.d.ts create mode 100644 node_modules/sucrase/dist/transformers/TypeScriptTransformer.js create mode 100644 node_modules/sucrase/dist/transformers/TypeScriptTransformer.mjs create mode 100644 node_modules/sucrase/dist/util/elideImportEquals.d.ts create mode 100644 node_modules/sucrase/dist/util/elideImportEquals.js create mode 100644 node_modules/sucrase/dist/util/elideImportEquals.mjs create mode 100644 node_modules/sucrase/dist/util/formatTokens.d.ts create mode 100644 node_modules/sucrase/dist/util/formatTokens.js create mode 100644 node_modules/sucrase/dist/util/formatTokens.mjs create 
mode 100644 node_modules/sucrase/dist/util/getClassInfo.d.ts create mode 100644 node_modules/sucrase/dist/util/getClassInfo.js create mode 100644 node_modules/sucrase/dist/util/getClassInfo.mjs create mode 100644 node_modules/sucrase/dist/util/getDeclarationInfo.d.ts create mode 100644 node_modules/sucrase/dist/util/getDeclarationInfo.js create mode 100644 node_modules/sucrase/dist/util/getDeclarationInfo.mjs create mode 100644 node_modules/sucrase/dist/util/getJSXPragmaInfo.d.ts create mode 100644 node_modules/sucrase/dist/util/getJSXPragmaInfo.js create mode 100644 node_modules/sucrase/dist/util/getJSXPragmaInfo.mjs create mode 100644 node_modules/sucrase/dist/util/getNonTypeIdentifiers.d.ts create mode 100644 node_modules/sucrase/dist/util/getNonTypeIdentifiers.js create mode 100644 node_modules/sucrase/dist/util/getNonTypeIdentifiers.mjs create mode 100644 node_modules/sucrase/dist/util/getTSImportedNames.d.ts create mode 100644 node_modules/sucrase/dist/util/getTSImportedNames.js create mode 100644 node_modules/sucrase/dist/util/getTSImportedNames.mjs create mode 100644 node_modules/sucrase/dist/util/isIdentifier.d.ts create mode 100644 node_modules/sucrase/dist/util/isIdentifier.js create mode 100644 node_modules/sucrase/dist/util/isIdentifier.mjs create mode 100644 node_modules/sucrase/dist/util/shouldElideDefaultExport.d.ts create mode 100644 node_modules/sucrase/dist/util/shouldElideDefaultExport.js create mode 100644 node_modules/sucrase/dist/util/shouldElideDefaultExport.mjs create mode 100644 node_modules/sucrase/package.json create mode 100644 node_modules/sucrase/register/index.js create mode 100644 node_modules/sucrase/register/js.js create mode 100644 node_modules/sucrase/register/jsx.js create mode 100644 node_modules/sucrase/register/ts-legacy-module-interop.js create mode 100644 node_modules/sucrase/register/ts.js create mode 100644 node_modules/sucrase/register/tsx-legacy-module-interop.js create mode 100644 node_modules/sucrase/register/tsx.js create mode 100644 node_modules/supports-color/browser.js create mode 100644 node_modules/supports-color/index.js create mode 100644 node_modules/supports-color/license create mode 100644 node_modules/supports-color/package.json create mode 100644 node_modules/supports-color/readme.md create mode 100644 node_modules/tar/LICENSE create mode 100644 node_modules/tar/README.md create mode 100644 node_modules/tar/index.js create mode 100644 node_modules/tar/lib/.mkdir.js.swp create mode 100644 node_modules/tar/lib/buffer.js create mode 100644 node_modules/tar/lib/create.js create mode 100644 node_modules/tar/lib/extract.js create mode 100644 node_modules/tar/lib/header.js create mode 100644 node_modules/tar/lib/high-level-opt.js create mode 100644 node_modules/tar/lib/large-numbers.js create mode 100644 node_modules/tar/lib/list.js create mode 100644 node_modules/tar/lib/mkdir.js create mode 100644 node_modules/tar/lib/mode-fix.js create mode 100644 node_modules/tar/lib/pack.js create mode 100644 node_modules/tar/lib/parse.js create mode 100644 node_modules/tar/lib/pax.js create mode 100644 node_modules/tar/lib/read-entry.js create mode 100644 node_modules/tar/lib/replace.js create mode 100644 node_modules/tar/lib/types.js create mode 100644 node_modules/tar/lib/unpack.js create mode 100644 node_modules/tar/lib/update.js create mode 100644 node_modules/tar/lib/warn-mixin.js create mode 100644 node_modules/tar/lib/winchars.js create mode 100644 node_modules/tar/lib/write-entry.js create mode 120000 
node_modules/tar/node_modules/.bin/mkdirp create mode 100644 node_modules/tar/node_modules/safe-buffer/LICENSE create mode 100644 node_modules/tar/node_modules/safe-buffer/README.md create mode 100644 node_modules/tar/node_modules/safe-buffer/index.d.ts create mode 100644 node_modules/tar/node_modules/safe-buffer/index.js create mode 100644 node_modules/tar/node_modules/safe-buffer/package.json create mode 100644 node_modules/tar/package.json create mode 100644 node_modules/term-size/index.js create mode 100644 node_modules/term-size/license create mode 100644 node_modules/term-size/package.json create mode 100644 node_modules/term-size/readme.md create mode 100755 node_modules/term-size/vendor/macos/term-size create mode 100644 node_modules/term-size/vendor/windows/term-size.exe create mode 100644 node_modules/thenify-all/History.md create mode 100644 node_modules/thenify-all/LICENSE create mode 100644 node_modules/thenify-all/README.md create mode 100644 node_modules/thenify-all/index.js create mode 100644 node_modules/thenify-all/package.json create mode 100644 node_modules/thenify/History.md create mode 100644 node_modules/thenify/LICENSE create mode 100644 node_modules/thenify/README.md create mode 100644 node_modules/thenify/index.js create mode 100644 node_modules/thenify/package.json create mode 100644 node_modules/timed-out/index.js create mode 100644 node_modules/timed-out/license create mode 100644 node_modules/timed-out/package.json create mode 100644 node_modules/timed-out/readme.md create mode 100644 node_modules/to-object-path/LICENSE create mode 100644 node_modules/to-object-path/README.md create mode 100644 node_modules/to-object-path/index.js create mode 100644 node_modules/to-object-path/package.json create mode 100644 node_modules/to-regex-range/LICENSE create mode 100644 node_modules/to-regex-range/README.md create mode 100644 node_modules/to-regex-range/index.js create mode 100644 node_modules/to-regex-range/package.json create mode 100644 node_modules/to-regex/LICENSE create mode 100644 node_modules/to-regex/README.md create mode 100644 node_modules/to-regex/index.js create mode 100644 node_modules/to-regex/node_modules/define-property/CHANGELOG.md create mode 100644 node_modules/to-regex/node_modules/define-property/LICENSE create mode 100644 node_modules/to-regex/node_modules/define-property/README.md create mode 100644 node_modules/to-regex/node_modules/define-property/index.js create mode 100644 node_modules/to-regex/node_modules/define-property/package.json create mode 100644 node_modules/to-regex/node_modules/extend-shallow/LICENSE create mode 100644 node_modules/to-regex/node_modules/extend-shallow/README.md create mode 100644 node_modules/to-regex/node_modules/extend-shallow/index.js create mode 100644 node_modules/to-regex/node_modules/extend-shallow/package.json create mode 100644 node_modules/to-regex/node_modules/is-extendable/LICENSE create mode 100644 node_modules/to-regex/node_modules/is-extendable/README.md create mode 100644 node_modules/to-regex/node_modules/is-extendable/index.d.ts create mode 100644 node_modules/to-regex/node_modules/is-extendable/index.js create mode 100644 node_modules/to-regex/node_modules/is-extendable/package.json create mode 100644 node_modules/to-regex/package.json create mode 100644 node_modules/toidentifier/LICENSE create mode 100644 node_modules/toidentifier/README.md create mode 100644 node_modules/toidentifier/index.js create mode 100644 node_modules/toidentifier/package.json create mode 100644 
node_modules/touch/LICENSE create mode 100644 node_modules/touch/README.md create mode 100755 node_modules/touch/bin/nodetouch.js create mode 100644 node_modules/touch/index.js create mode 120000 node_modules/touch/node_modules/.bin/nopt create mode 100644 node_modules/touch/package.json create mode 100644 node_modules/type-is/HISTORY.md create mode 100644 node_modules/type-is/LICENSE create mode 100644 node_modules/type-is/README.md create mode 100644 node_modules/type-is/index.js create mode 100644 node_modules/type-is/package.json create mode 100644 node_modules/undefsafe/.jscsrc create mode 100644 node_modules/undefsafe/.jshintrc create mode 100644 node_modules/undefsafe/.npmignore create mode 100644 node_modules/undefsafe/.travis.yml create mode 100644 node_modules/undefsafe/LICENSE create mode 100644 node_modules/undefsafe/README.md create mode 100644 node_modules/undefsafe/example.js create mode 100644 node_modules/undefsafe/lib/undefsafe.js create mode 100644 node_modules/undefsafe/package.json create mode 100644 node_modules/union-value/LICENSE create mode 100644 node_modules/union-value/README.md create mode 100644 node_modules/union-value/index.js create mode 100644 node_modules/union-value/package.json create mode 100644 node_modules/unique-string/index.js create mode 100644 node_modules/unique-string/license create mode 100644 node_modules/unique-string/package.json create mode 100644 node_modules/unique-string/readme.md create mode 100644 node_modules/unpipe/HISTORY.md create mode 100644 node_modules/unpipe/LICENSE create mode 100644 node_modules/unpipe/README.md create mode 100644 node_modules/unpipe/index.js create mode 100644 node_modules/unpipe/package.json create mode 100644 node_modules/unset-value/LICENSE create mode 100644 node_modules/unset-value/README.md create mode 100644 node_modules/unset-value/index.js create mode 100644 node_modules/unset-value/node_modules/has-value/LICENSE create mode 100644 node_modules/unset-value/node_modules/has-value/README.md create mode 100644 node_modules/unset-value/node_modules/has-value/index.js create mode 100644 node_modules/unset-value/node_modules/has-value/node_modules/isobject/LICENSE create mode 100644 node_modules/unset-value/node_modules/has-value/node_modules/isobject/README.md create mode 100644 node_modules/unset-value/node_modules/has-value/node_modules/isobject/index.js create mode 100644 node_modules/unset-value/node_modules/has-value/node_modules/isobject/package.json create mode 100644 node_modules/unset-value/node_modules/has-value/package.json create mode 100644 node_modules/unset-value/node_modules/has-values/LICENSE create mode 100644 node_modules/unset-value/node_modules/has-values/README.md create mode 100644 node_modules/unset-value/node_modules/has-values/index.js create mode 100644 node_modules/unset-value/node_modules/has-values/package.json create mode 100644 node_modules/unset-value/package.json create mode 100644 node_modules/unzip-response/index.js create mode 100644 node_modules/unzip-response/license create mode 100644 node_modules/unzip-response/package.json create mode 100644 node_modules/unzip-response/readme.md create mode 100755 node_modules/upath/LICENSE create mode 100644 node_modules/upath/build/code/upath.js create mode 100644 node_modules/upath/package.json create mode 100644 node_modules/upath/readme.md create mode 100644 node_modules/upath/upath.d.ts create mode 100644 node_modules/update-notifier/check.js create mode 100644 node_modules/update-notifier/index.js create mode 100644 
node_modules/update-notifier/license create mode 120000 node_modules/update-notifier/node_modules/.bin/is-ci create mode 100644 node_modules/update-notifier/package.json create mode 100644 node_modules/update-notifier/readme.md create mode 100644 node_modules/urix/.jshintrc create mode 100644 node_modules/urix/LICENSE create mode 100644 node_modules/urix/index.js create mode 100644 node_modules/urix/package.json create mode 100644 node_modules/urix/readme.md create mode 100644 node_modules/urix/test/index.js create mode 100644 node_modules/url-parse-lax/index.js create mode 100644 node_modules/url-parse-lax/license create mode 100644 node_modules/url-parse-lax/package.json create mode 100644 node_modules/url-parse-lax/readme.md create mode 100644 node_modules/use/LICENSE create mode 100644 node_modules/use/README.md create mode 100644 node_modules/use/index.js create mode 100644 node_modules/use/package.json create mode 100644 node_modules/util-deprecate/History.md create mode 100644 node_modules/util-deprecate/LICENSE create mode 100644 node_modules/util-deprecate/README.md create mode 100644 node_modules/util-deprecate/browser.js create mode 100644 node_modules/util-deprecate/node.js create mode 100644 node_modules/util-deprecate/package.json create mode 100644 node_modules/utils-merge/.npmignore create mode 100644 node_modules/utils-merge/LICENSE create mode 100644 node_modules/utils-merge/README.md create mode 100644 node_modules/utils-merge/index.js create mode 100644 node_modules/utils-merge/package.json create mode 100644 node_modules/vary/HISTORY.md create mode 100644 node_modules/vary/LICENSE create mode 100644 node_modules/vary/README.md create mode 100644 node_modules/vary/index.js create mode 100644 node_modules/vary/package.json create mode 100644 node_modules/which/CHANGELOG.md create mode 100644 node_modules/which/LICENSE create mode 100644 node_modules/which/README.md create mode 100755 node_modules/which/bin/which create mode 100644 node_modules/which/package.json create mode 100644 node_modules/which/which.js create mode 100644 node_modules/wide-align/LICENSE create mode 100644 node_modules/wide-align/README.md create mode 100644 node_modules/wide-align/align.js create mode 100644 node_modules/wide-align/package.json create mode 100644 node_modules/widest-line/index.js create mode 100644 node_modules/widest-line/license create mode 100644 node_modules/widest-line/package.json create mode 100644 node_modules/widest-line/readme.md create mode 100644 node_modules/wrappy/LICENSE create mode 100644 node_modules/wrappy/README.md create mode 100644 node_modules/wrappy/package.json create mode 100644 node_modules/wrappy/wrappy.js create mode 100644 node_modules/write-file-atomic/CHANGELOG.md create mode 100644 node_modules/write-file-atomic/LICENSE create mode 100644 node_modules/write-file-atomic/README.md create mode 100644 node_modules/write-file-atomic/index.js create mode 100644 node_modules/write-file-atomic/package.json create mode 100644 node_modules/xdg-basedir/index.js create mode 100644 node_modules/xdg-basedir/license create mode 100644 node_modules/xdg-basedir/package.json create mode 100644 node_modules/xdg-basedir/readme.md create mode 100644 node_modules/yallist/LICENSE create mode 100644 node_modules/yallist/README.md create mode 100644 node_modules/yallist/iterator.js create mode 100644 node_modules/yallist/package.json create mode 100644 node_modules/yallist/yallist.js create mode 100644 nodemon.json create mode 100644 package.json create mode 100644 src/app.js 
 create mode 100644 src/routes.js
 create mode 100644 src/server.js
 create mode 100644 yarn.lock

diff --git a/node_modules/.bin/atob b/node_modules/.bin/atob
new file mode 120000
index 00000000..a68344a3
--- /dev/null
+++ b/node_modules/.bin/atob
@@ -0,0 +1 @@
+../atob/bin/atob.js
\ No newline at end of file
diff --git a/node_modules/.bin/detect-libc b/node_modules/.bin/detect-libc
new file mode 120000
index 00000000..b4c4b767
--- /dev/null
+++ b/node_modules/.bin/detect-libc
@@ -0,0 +1 @@
+../detect-libc/bin/detect-libc.js
\ No newline at end of file
diff --git a/node_modules/.bin/is-ci b/node_modules/.bin/is-ci
new file mode 120000
index 00000000..fe6aca6f
--- /dev/null
+++ b/node_modules/.bin/is-ci
@@ -0,0 +1 @@
+../is-ci/bin.js
\ No newline at end of file
diff --git a/node_modules/.bin/mime b/node_modules/.bin/mime
new file mode 120000
index 00000000..fbb7ee0e
--- /dev/null
+++ b/node_modules/.bin/mime
@@ -0,0 +1 @@
+../mime/cli.js
\ No newline at end of file
diff --git a/node_modules/.bin/mkdirp b/node_modules/.bin/mkdirp
new file mode 120000
index 00000000..017896ce
--- /dev/null
+++ b/node_modules/.bin/mkdirp
@@ -0,0 +1 @@
+../mkdirp/bin/cmd.js
\ No newline at end of file
diff --git a/node_modules/.bin/needle b/node_modules/.bin/needle
new file mode 120000
index 00000000..82809694
--- /dev/null
+++ b/node_modules/.bin/needle
@@ -0,0 +1 @@
+../needle/bin/needle
\ No newline at end of file
diff --git a/node_modules/.bin/node-pre-gyp b/node_modules/.bin/node-pre-gyp
new file mode 120000
index 00000000..47a90a51
--- /dev/null
+++ b/node_modules/.bin/node-pre-gyp
@@ -0,0 +1 @@
+../node-pre-gyp/bin/node-pre-gyp
\ No newline at end of file
diff --git a/node_modules/.bin/nodemon b/node_modules/.bin/nodemon
new file mode 120000
index 00000000..1056ddc1
--- /dev/null
+++ b/node_modules/.bin/nodemon
@@ -0,0 +1 @@
+../nodemon/bin/nodemon.js
\ No newline at end of file
diff --git a/node_modules/.bin/nodetouch b/node_modules/.bin/nodetouch
new file mode 120000
index 00000000..3409fdb7
--- /dev/null
+++ b/node_modules/.bin/nodetouch
@@ -0,0 +1 @@
+../touch/bin/nodetouch.js
\ No newline at end of file
diff --git a/node_modules/.bin/nopt b/node_modules/.bin/nopt
new file mode 120000
index 00000000..94208576
--- /dev/null
+++ b/node_modules/.bin/nopt
@@ -0,0 +1 @@
+../node-pre-gyp/node_modules/nopt/bin/nopt.js
\ No newline at end of file
diff --git a/node_modules/.bin/rc b/node_modules/.bin/rc
new file mode 120000
index 00000000..48b3cda7
--- /dev/null
+++ b/node_modules/.bin/rc
@@ -0,0 +1 @@
+../rc/cli.js
\ No newline at end of file
diff --git a/node_modules/.bin/rimraf b/node_modules/.bin/rimraf
new file mode 120000
index 00000000..4cd49a49
--- /dev/null
+++ b/node_modules/.bin/rimraf
@@ -0,0 +1 @@
+../rimraf/bin.js
\ No newline at end of file
diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver
new file mode 120000
index 00000000..317eb293
--- /dev/null
+++ b/node_modules/.bin/semver
@@ -0,0 +1 @@
+../semver/bin/semver
\ No newline at end of file
diff --git a/node_modules/.bin/sucrase b/node_modules/.bin/sucrase
new file mode 120000
index 00000000..0ac7e775
--- /dev/null
+++ b/node_modules/.bin/sucrase
@@ -0,0 +1 @@
+../sucrase/bin/sucrase
\ No newline at end of file
diff --git a/node_modules/.bin/sucrase-node b/node_modules/.bin/sucrase-node
new file mode 120000
index 00000000..8b96fae2
--- /dev/null
+++ b/node_modules/.bin/sucrase-node
@@ -0,0 +1 @@
+../sucrase/bin/sucrase-node
\ No newline at end of file
diff --git a/node_modules/.bin/which b/node_modules/.bin/which
new file mode
120000 index 00000000..f62471c8 --- /dev/null +++ b/node_modules/.bin/which @@ -0,0 +1 @@ +../which/bin/which \ No newline at end of file diff --git a/node_modules/.yarn-integrity b/node_modules/.yarn-integrity new file mode 100644 index 00000000..26f7f5ab --- /dev/null +++ b/node_modules/.yarn-integrity @@ -0,0 +1,368 @@ +{ + "systemParams": "darwin-x64-72", + "modulesFolders": [ + "node_modules" + ], + "flags": [], + "linkedModules": [], + "topLevelPatterns": [ + "express@^4.17.1", + "nodemon@^1.19.1", + "sucrase@^3.10.1" + ], + "lockfileEntries": { + "abbrev@1": "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8", + "accepts@~1.3.7": "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd", + "ansi-align@^2.0.0": "https://registry.yarnpkg.com/ansi-align/-/ansi-align-2.0.0.tgz#c36aeccba563b89ceb556f3690f0b1d9e3547f7f", + "ansi-regex@^2.0.0": "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df", + "ansi-regex@^3.0.0": "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998", + "ansi-styles@^3.2.1": "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d", + "any-promise@^1.0.0": "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f", + "anymatch@^2.0.0": "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb", + "aproba@^1.0.3": "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a", + "are-we-there-yet@~1.1.2": "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21", + "arr-diff@^4.0.0": "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520", + "arr-flatten@^1.1.0": "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1", + "arr-union@^3.1.0": "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4", + "array-flatten@1.1.1": "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2", + "array-unique@^0.3.2": "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428", + "assign-symbols@^1.0.0": "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367", + "async-each@^1.0.1": "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf", + "atob@^2.1.1": "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9", + "balanced-match@^1.0.0": "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767", + "base@^0.11.1": "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f", + "binary-extensions@^1.0.0": "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65", + "body-parser@1.19.0": "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a", + "boxen@^1.2.1": "https://registry.yarnpkg.com/boxen/-/boxen-1.3.0.tgz#55c6c39a8ba58d9c61ad22cd877532deb665a20b", + 
"brace-expansion@^1.1.7": "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd", + "braces@^2.3.1": "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729", + "braces@^2.3.2": "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729", + "bytes@3.1.0": "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6", + "cache-base@^1.0.1": "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2", + "camelcase@^4.0.0": "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd", + "capture-stack-trace@^1.0.0": "https://registry.yarnpkg.com/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz#a6c0bbe1f38f3aa0b92238ecb6ff42c344d4135d", + "chalk@^2.0.1": "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424", + "chokidar@^2.1.5": "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.6.tgz#b6cad653a929e244ce8a834244164d241fa954c5", + "chownr@^1.1.1": "https://registry.yarnpkg.com/chownr/-/chownr-1.1.2.tgz#a18f1e0b269c8a6a5d3c86eb298beb14c3dd7bf6", + "ci-info@^1.5.0": "https://registry.yarnpkg.com/ci-info/-/ci-info-1.6.0.tgz#2ca20dbb9ceb32d4524a683303313f0304b1e497", + "class-utils@^0.3.5": "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463", + "cli-boxes@^1.0.0": "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-1.0.0.tgz#4fa917c3e59c94a004cd61f8ee509da651687143", + "code-point-at@^1.0.0": "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77", + "collection-visit@^1.0.0": "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0", + "color-convert@^1.9.0": "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8", + "color-name@1.1.3": "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25", + "commander@^2.19.0": "https://registry.yarnpkg.com/commander/-/commander-2.20.0.tgz#d58bb2b5c1ee8f87b0d340027e9e94e222c5a422", + "component-emitter@^1.2.1": "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0", + "concat-map@0.0.1": "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b", + "configstore@^3.0.0": "https://registry.yarnpkg.com/configstore/-/configstore-3.1.2.tgz#c6f25defaeef26df12dd33414b001fe81a543f8f", + "console-control-strings@^1.0.0": "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e", + "console-control-strings@~1.1.0": "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e", + "content-disposition@0.5.3": "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd", + "content-type@~1.0.4": "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b", + "cookie-signature@1.0.6": "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c", + "cookie@0.4.0": 
"https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba", + "copy-descriptor@^0.1.0": "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d", + "core-util-is@~1.0.0": "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7", + "create-error-class@^3.0.0": "https://registry.yarnpkg.com/create-error-class/-/create-error-class-3.0.2.tgz#06be7abef947a3f14a30fd610671d401bca8b7b6", + "cross-spawn@^5.0.1": "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449", + "crypto-random-string@^1.0.0": "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e", + "debug@2.6.9": "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f", + "debug@^2.2.0": "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f", + "debug@^2.3.3": "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f", + "debug@^3.1.0": "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b", + "debug@^3.2.6": "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b", + "decode-uri-component@^0.2.0": "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545", + "deep-extend@^0.6.0": "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac", + "define-property@^0.2.5": "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116", + "define-property@^1.0.0": "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6", + "define-property@^2.0.2": "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d", + "delegates@^1.0.0": "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a", + "depd@~1.1.2": "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9", + "destroy@~1.0.4": "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80", + "detect-libc@^1.0.2": "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b", + "dot-prop@^4.1.0": "https://registry.yarnpkg.com/dot-prop/-/dot-prop-4.2.0.tgz#1f19e0c2e1aa0e32797c49799f2837ac6af69c57", + "duplexer3@^0.1.4": "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2", + "ee-first@1.1.1": "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d", + "encodeurl@~1.0.2": "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59", + "escape-html@~1.0.3": "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988", + "escape-string-regexp@^1.0.5": "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4", + "etag@~1.8.1": "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887", + "execa@^0.7.0": 
"https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777", + "expand-brackets@^2.1.4": "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622", + "express@^4.17.1": "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134", + "extend-shallow@^2.0.1": "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f", + "extend-shallow@^3.0.0": "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8", + "extend-shallow@^3.0.2": "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8", + "extglob@^2.0.4": "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543", + "fill-range@^4.0.0": "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7", + "finalhandler@~1.1.2": "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d", + "for-in@^1.0.2": "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80", + "forwarded@~0.1.2": "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84", + "fragment-cache@^0.2.1": "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19", + "fresh@0.5.2": "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7", + "fs-minipass@^1.2.5": "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.6.tgz#2c5cc30ded81282bfe8a0d7c7c1853ddeb102c07", + "fs.realpath@^1.0.0": "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f", + "fsevents@^1.2.7": "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f", + "gauge@~2.7.3": "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7", + "get-stream@^3.0.0": "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14", + "get-value@^2.0.3": "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28", + "get-value@^2.0.6": "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28", + "glob-parent@^3.1.0": "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae", + "glob@^7.1.3": "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255", + "global-dirs@^0.1.0": "https://registry.yarnpkg.com/global-dirs/-/global-dirs-0.1.1.tgz#b319c0dd4607f353f3be9cca4c72fc148c49f445", + "got@^6.7.1": "https://registry.yarnpkg.com/got/-/got-6.7.1.tgz#240cd05785a9a18e561dc1b44b41c763ef1e8db0", + "graceful-fs@^4.1.11": "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.0.tgz#8d8fdc73977cb04104721cb53666c1ca64cd328b", + "graceful-fs@^4.1.2": "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.0.tgz#8d8fdc73977cb04104721cb53666c1ca64cd328b", + "has-flag@^3.0.0": "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd", + "has-unicode@^2.0.0": 
"https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9", + "has-value@^0.3.1": "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f", + "has-value@^1.0.0": "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177", + "has-values@^0.1.4": "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771", + "has-values@^1.0.0": "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f", + "http-errors@1.7.2": "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f", + "http-errors@~1.7.2": "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06", + "iconv-lite@0.4.24": "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b", + "iconv-lite@^0.4.4": "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b", + "ignore-by-default@^1.0.1": "https://registry.yarnpkg.com/ignore-by-default/-/ignore-by-default-1.0.1.tgz#48ca6d72f6c6a3af00a9ad4ae6876be3889e2b09", + "ignore-walk@^3.0.1": "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.1.tgz#a83e62e7d272ac0e3b551aaa82831a19b69f82f8", + "import-lazy@^2.1.0": "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43", + "imurmurhash@^0.1.4": "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea", + "inflight@^1.0.4": "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9", + "inherits@2": "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c", + "inherits@2.0.3": "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de", + "inherits@2.0.4": "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c", + "inherits@^2.0.3": "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c", + "inherits@~2.0.3": "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c", + "ini@^1.3.4": "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927", + "ini@~1.3.0": "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927", + "ipaddr.js@1.9.0": "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65", + "is-accessor-descriptor@^0.1.6": "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6", + "is-accessor-descriptor@^1.0.0": "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656", + "is-binary-path@^1.0.0": "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898", + "is-buffer@^1.1.5": "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be", + "is-ci@^1.0.10": "https://registry.yarnpkg.com/is-ci/-/is-ci-1.2.1.tgz#e3779c8ee17fccf428488f6e281187f2e632841c", + "is-data-descriptor@^0.1.4": 
"https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56", + "is-data-descriptor@^1.0.0": "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7", + "is-descriptor@^0.1.0": "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca", + "is-descriptor@^1.0.0": "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec", + "is-descriptor@^1.0.2": "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec", + "is-extendable@^0.1.0": "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89", + "is-extendable@^0.1.1": "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89", + "is-extendable@^1.0.1": "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4", + "is-extglob@^2.1.0": "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2", + "is-extglob@^2.1.1": "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2", + "is-fullwidth-code-point@^1.0.0": "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb", + "is-fullwidth-code-point@^2.0.0": "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f", + "is-glob@^3.1.0": "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a", + "is-glob@^4.0.0": "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc", + "is-installed-globally@^0.1.0": "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.1.0.tgz#0dfd98f5a9111716dd535dda6492f67bf3d25a80", + "is-npm@^1.0.0": "https://registry.yarnpkg.com/is-npm/-/is-npm-1.0.0.tgz#f2fb63a65e4905b406c86072765a1a4dc793b9f4", + "is-number@^3.0.0": "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195", + "is-obj@^1.0.0": "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f", + "is-path-inside@^1.0.0": "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.1.tgz#8ef5b7de50437a3fdca6b4e865ef7aa55cb48036", + "is-plain-object@^2.0.3": "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677", + "is-plain-object@^2.0.4": "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677", + "is-redirect@^1.0.0": "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24", + "is-retry-allowed@^1.0.0": "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34", + "is-stream@^1.0.0": "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44", + "is-stream@^1.1.0": "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44", + "is-windows@^1.0.2": 
"https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d", + "isarray@1.0.0": "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11", + "isarray@~1.0.0": "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11", + "isexe@^2.0.0": "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10", + "isobject@^2.0.0": "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89", + "isobject@^3.0.0": "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df", + "isobject@^3.0.1": "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df", + "kind-of@^3.0.2": "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64", + "kind-of@^3.0.3": "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64", + "kind-of@^3.2.0": "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64", + "kind-of@^4.0.0": "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57", + "kind-of@^5.0.0": "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d", + "kind-of@^6.0.0": "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051", + "kind-of@^6.0.2": "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051", + "latest-version@^3.0.0": "https://registry.yarnpkg.com/latest-version/-/latest-version-3.1.0.tgz#a205383fea322b33b5ae3b18abee0dc2f356ee15", + "lines-and-columns@^1.1.6": "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00", + "lowercase-keys@^1.0.0": "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f", + "lru-cache@^4.0.1": "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd", + "make-dir@^1.0.0": "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c", + "map-cache@^0.2.2": "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf", + "map-visit@^1.0.0": "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f", + "media-typer@0.3.0": "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748", + "merge-descriptors@1.0.1": "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61", + "methods@~1.1.2": "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee", + "micromatch@^3.1.10": "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23", + "micromatch@^3.1.4": "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23", + "mime-db@1.40.0": "https://registry.yarnpkg.com/mime-db/-/mime-db-1.40.0.tgz#a65057e998db090f732a68f6c276d387d4126c32", + "mime-types@~2.1.24": "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.24.tgz#b6f8d0b3e951efb77dedeca194cff6d16f676f81", + 
"mime@1.6.0": "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1", + "minimatch@^3.0.4": "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083", + "minimist@0.0.8": "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d", + "minimist@^1.2.0": "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284", + "minipass@^2.2.1": "https://registry.yarnpkg.com/minipass/-/minipass-2.3.5.tgz#cacebe492022497f656b0f0f51e2682a9ed2d848", + "minipass@^2.3.5": "https://registry.yarnpkg.com/minipass/-/minipass-2.3.5.tgz#cacebe492022497f656b0f0f51e2682a9ed2d848", + "minizlib@^1.2.1": "https://registry.yarnpkg.com/minizlib/-/minizlib-1.2.1.tgz#dd27ea6136243c7c880684e8672bb3a45fd9b614", + "mixin-deep@^1.2.0": "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566", + "mkdirp@^0.5.0": "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903", + "mkdirp@^0.5.1": "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903", + "ms@2.0.0": "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8", + "ms@2.1.1": "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a", + "ms@^2.1.1": "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009", + "mz@^2.7.0": "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32", + "nan@^2.12.1": "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c", + "nanomatch@^1.2.9": "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119", + "needle@^2.2.1": "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c", + "negotiator@0.6.2": "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb", + "node-modules-regexp@^1.0.0": "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40", + "node-pre-gyp@^0.12.0": "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149", + "nodemon@^1.19.1": "https://registry.yarnpkg.com/nodemon/-/nodemon-1.19.1.tgz#576f0aad0f863aabf8c48517f6192ff987cd5071", + "nopt@^4.0.1": "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d", + "nopt@~1.0.10": "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee", + "normalize-path@^2.1.1": "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9", + "normalize-path@^3.0.0": "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65", + "npm-bundled@^1.0.1": "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd", + "npm-packlist@^1.1.6": "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.4.tgz#866224233850ac534b63d1a6e76050092b5d2f44", + "npm-run-path@^2.0.0": "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f", + "npmlog@^4.0.2": 
"https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b", + "number-is-nan@^1.0.0": "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d", + "object-assign@^4.0.1": "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863", + "object-assign@^4.1.0": "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863", + "object-copy@^0.1.0": "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c", + "object-visit@^1.0.0": "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb", + "object.pick@^1.3.0": "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747", + "on-finished@~2.3.0": "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947", + "once@^1.3.0": "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1", + "os-homedir@^1.0.0": "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3", + "os-tmpdir@^1.0.0": "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274", + "osenv@^0.1.4": "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410", + "p-finally@^1.0.0": "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae", + "package-json@^4.0.0": "https://registry.yarnpkg.com/package-json/-/package-json-4.0.1.tgz#8869a0401253661c4c4ca3da6c2121ed555f5eed", + "parseurl@~1.3.3": "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4", + "pascalcase@^0.1.1": "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14", + "path-dirname@^1.0.0": "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0", + "path-is-absolute@^1.0.0": "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f", + "path-is-inside@^1.0.1": "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53", + "path-key@^2.0.0": "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40", + "path-to-regexp@0.1.7": "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c", + "pify@^3.0.0": "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176", + "pirates@^4.0.0": "https://registry.yarnpkg.com/pirates/-/pirates-4.0.1.tgz#643a92caf894566f91b2b986d2c66950a8e2fb87", + "posix-character-classes@^0.1.0": "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab", + "prepend-http@^1.0.1": "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc", + "process-nextick-args@~2.0.0": "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2", + "proxy-addr@~2.0.5": 
"https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34", + "pseudomap@^1.0.2": "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3", + "pstree.remy@^1.1.6": "https://registry.yarnpkg.com/pstree.remy/-/pstree.remy-1.1.7.tgz#c76963a28047ed61542dc361aa26ee55a7fa15f3", + "qs@6.7.0": "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc", + "range-parser@~1.2.1": "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031", + "raw-body@2.4.0": "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332", + "rc@^1.0.1": "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed", + "rc@^1.1.6": "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed", + "rc@^1.2.7": "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed", + "readable-stream@^2.0.2": "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf", + "readable-stream@^2.0.6": "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf", + "readdirp@^2.2.1": "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525", + "regex-not@^1.0.0": "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c", + "regex-not@^1.0.2": "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c", + "registry-auth-token@^3.0.1": "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-3.4.0.tgz#d7446815433f5d5ed6431cd5dca21048f66b397e", + "registry-url@^3.0.3": "https://registry.yarnpkg.com/registry-url/-/registry-url-3.1.0.tgz#3d4ef870f73dde1d77f0cf9a381432444e174942", + "remove-trailing-separator@^1.0.1": "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef", + "repeat-element@^1.1.2": "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce", + "repeat-string@^1.6.1": "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637", + "resolve-url@^0.2.1": "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a", + "ret@~0.1.10": "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc", + "rimraf@^2.6.1": "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab", + "safe-buffer@5.1.2": "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d", + "safe-buffer@^5.0.1": "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519", + "safe-buffer@^5.1.2": "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519", + "safe-buffer@~5.1.0": "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d", + "safe-buffer@~5.1.1": "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d", + "safe-regex@^1.1.0": 
"https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e", + "safer-buffer@>= 2.1.2 < 3": "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a", + "sax@^1.2.4": "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9", + "semver-diff@^2.0.0": "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36", + "semver@^5.0.3": "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b", + "semver@^5.1.0": "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b", + "semver@^5.3.0": "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b", + "semver@^5.5.0": "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b", + "send@0.17.1": "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8", + "serve-static@1.14.1": "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9", + "set-blocking@~2.0.0": "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7", + "set-value@^2.0.0": "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b", + "set-value@^2.0.1": "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b", + "setprototypeof@1.1.1": "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683", + "shebang-command@^1.2.0": "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea", + "shebang-regex@^1.0.0": "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3", + "signal-exit@^3.0.0": "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d", + "signal-exit@^3.0.2": "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d", + "snapdragon-node@^2.0.1": "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b", + "snapdragon-util@^3.0.1": "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2", + "snapdragon@^0.8.1": "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d", + "source-map-resolve@^0.5.0": "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259", + "source-map-url@^0.4.0": "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3", + "source-map@^0.5.6": "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc", + "split-string@^3.0.1": "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2", + "split-string@^3.0.2": "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2", + "static-extend@^0.1.1": "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6", + 
"statuses@>= 1.5.0 < 2": "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c", + "statuses@~1.5.0": "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c", + "string-width@^1.0.1": "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3", + "string-width@^1.0.2 || 2": "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e", + "string-width@^2.0.0": "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e", + "string-width@^2.1.1": "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e", + "string_decoder@~1.1.1": "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8", + "strip-ansi@^3.0.0": "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf", + "strip-ansi@^3.0.1": "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf", + "strip-ansi@^4.0.0": "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f", + "strip-eof@^1.0.0": "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf", + "strip-json-comments@~2.0.1": "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a", + "sucrase@^3.10.1": "https://registry.yarnpkg.com/sucrase/-/sucrase-3.10.1.tgz#70ce0bad0e4c8fbc3c3184dbd1797e82990d0602", + "supports-color@^5.2.0": "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f", + "supports-color@^5.3.0": "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f", + "tar@^4": "https://registry.yarnpkg.com/tar/-/tar-4.4.10.tgz#946b2810b9a5e0b26140cf78bea6b0b0d689eba1", + "term-size@^1.2.0": "https://registry.yarnpkg.com/term-size/-/term-size-1.2.0.tgz#458b83887f288fc56d6fffbfad262e26638efa69", + "thenify-all@^1.0.0": "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726", + "thenify@>= 3.1.0 < 4": "https://registry.yarnpkg.com/thenify/-/thenify-3.3.0.tgz#e69e38a1babe969b0108207978b9f62b88604839", + "timed-out@^4.0.0": "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f", + "to-object-path@^0.3.0": "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af", + "to-regex-range@^2.1.0": "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38", + "to-regex@^3.0.1": "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce", + "to-regex@^3.0.2": "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce", + "toidentifier@1.0.0": "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553", + "touch@^3.1.0": "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b", + "type-is@~1.6.17": "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131", + 
"type-is@~1.6.18": "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131", + "undefsafe@^2.0.2": "https://registry.yarnpkg.com/undefsafe/-/undefsafe-2.0.2.tgz#225f6b9e0337663e0d8e7cfd686fc2836ccace76", + "union-value@^1.0.0": "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847", + "unique-string@^1.0.0": "https://registry.yarnpkg.com/unique-string/-/unique-string-1.0.0.tgz#9e1057cca851abb93398f8b33ae187b99caec11a", + "unpipe@1.0.0": "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec", + "unpipe@~1.0.0": "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec", + "unset-value@^1.0.0": "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559", + "unzip-response@^2.0.1": "https://registry.yarnpkg.com/unzip-response/-/unzip-response-2.0.1.tgz#d2f0f737d16b0615e72a6935ed04214572d56f97", + "upath@^1.1.1": "https://registry.yarnpkg.com/upath/-/upath-1.1.2.tgz#3db658600edaeeccbe6db5e684d67ee8c2acd068", + "update-notifier@^2.5.0": "https://registry.yarnpkg.com/update-notifier/-/update-notifier-2.5.0.tgz#d0744593e13f161e406acb1d9408b72cad08aff6", + "urix@^0.1.0": "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72", + "url-parse-lax@^1.0.0": "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73", + "use@^3.1.0": "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f", + "util-deprecate@~1.0.1": "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf", + "utils-merge@1.0.1": "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713", + "vary@~1.1.2": "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc", + "which@^1.2.9": "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a", + "wide-align@^1.1.0": "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457", + "widest-line@^2.0.0": "https://registry.yarnpkg.com/widest-line/-/widest-line-2.0.1.tgz#7438764730ec7ef4381ce4df82fb98a53142a3fc", + "wrappy@1": "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f", + "write-file-atomic@^2.0.0": "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.4.3.tgz#1fd2e9ae1df3e75b8d8c367443c692d4ca81f481", + "xdg-basedir@^3.0.0": "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4", + "yallist@^2.1.2": "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52", + "yallist@^3.0.0": "https://registry.yarnpkg.com/yallist/-/yallist-3.0.3.tgz#b4b049e314be545e3ce802236d6cd22cd91c3de9", + "yallist@^3.0.3": "https://registry.yarnpkg.com/yallist/-/yallist-3.0.3.tgz#b4b049e314be545e3ce802236d6cd22cd91c3de9" + }, + "files": [], + "artifacts": { + "fsevents@1.2.9": [ + "lib", + "lib/binding", + "lib/binding/Release", + "lib/binding/Release/node-v72-darwin-x64", + "lib/binding/Release/node-v72-darwin-x64/fse.node" + ] + } +} \ No newline at end of file diff --git a/node_modules/abbrev/LICENSE b/node_modules/abbrev/LICENSE new file mode 100644 index 00000000..9bcfa9d7 --- /dev/null +++ 
b/node_modules/abbrev/LICENSE @@ -0,0 +1,46 @@ +This software is dual-licensed under the ISC and MIT licenses. +You may use this software under EITHER of the following licenses. + +---------- + +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------- + +Copyright Isaac Z. Schlueter and Contributors +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/abbrev/README.md b/node_modules/abbrev/README.md new file mode 100644 index 00000000..99746fe6 --- /dev/null +++ b/node_modules/abbrev/README.md @@ -0,0 +1,23 @@ +# abbrev-js + +Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev). + +Usage: + + var abbrev = require("abbrev"); + abbrev("foo", "fool", "folding", "flop"); + + // returns: + { fl: 'flop' + , flo: 'flop' + , flop: 'flop' + , fol: 'folding' + , fold: 'folding' + , foldi: 'folding' + , foldin: 'folding' + , folding: 'folding' + , foo: 'foo' + , fool: 'fool' + } + +This is handy for command-line scripts, or other cases where you want to be able to accept shorthands. 
diff --git a/node_modules/abbrev/abbrev.js b/node_modules/abbrev/abbrev.js new file mode 100644 index 00000000..7b1dc5d6 --- /dev/null +++ b/node_modules/abbrev/abbrev.js @@ -0,0 +1,61 @@ +module.exports = exports = abbrev.abbrev = abbrev + +abbrev.monkeyPatch = monkeyPatch + +function monkeyPatch () { + Object.defineProperty(Array.prototype, 'abbrev', { + value: function () { return abbrev(this) }, + enumerable: false, configurable: true, writable: true + }) + + Object.defineProperty(Object.prototype, 'abbrev', { + value: function () { return abbrev(Object.keys(this)) }, + enumerable: false, configurable: true, writable: true + }) +} + +function abbrev (list) { + if (arguments.length !== 1 || !Array.isArray(list)) { + list = Array.prototype.slice.call(arguments, 0) + } + for (var i = 0, l = list.length, args = [] ; i < l ; i ++) { + args[i] = typeof list[i] === "string" ? list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + args = args.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + var abbrevs = {} + , prev = "" + for (var i = 0, l = args.length ; i < l ; i ++) { + var current = args[i] + , next = args[i + 1] || "" + , nextMatches = true + , prevMatches = true + if (current === next) continue + for (var j = 0, cl = current.length ; j < cl ; j ++) { + var curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j ++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (var a = current.substr(0, j) ; j <= cl ; j ++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/node_modules/abbrev/package.json b/node_modules/abbrev/package.json new file mode 100644 index 00000000..bf4e8015 --- /dev/null +++ b/node_modules/abbrev/package.json @@ -0,0 +1,21 @@ +{ + "name": "abbrev", + "version": "1.1.1", + "description": "Like ruby's abbrev module, but in js", + "author": "Isaac Z. 
Schlueter ", + "main": "abbrev.js", + "scripts": { + "test": "tap test.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": "http://github.com/isaacs/abbrev-js", + "license": "ISC", + "devDependencies": { + "tap": "^10.1" + }, + "files": [ + "abbrev.js" + ] +} diff --git a/node_modules/accepts/HISTORY.md b/node_modules/accepts/HISTORY.md new file mode 100644 index 00000000..0bf04178 --- /dev/null +++ b/node_modules/accepts/HISTORY.md @@ -0,0 +1,236 @@ +1.3.7 / 2019-04-29 +================== + + * deps: negotiator@0.6.2 + - Fix sorting charset, encoding, and language with extra parameters + +1.3.6 / 2019-04-28 +================== + + * deps: mime-types@~2.1.24 + - deps: mime-db@~1.40.0 + +1.3.5 / 2018-02-28 +================== + + * deps: mime-types@~2.1.18 + - deps: mime-db@~1.33.0 + +1.3.4 / 2017-08-22 +================== + + * deps: mime-types@~2.1.16 + - deps: mime-db@~1.29.0 + +1.3.3 / 2016-05-02 +================== + + * deps: mime-types@~2.1.11 + - deps: mime-db@~1.23.0 + * deps: negotiator@0.6.1 + - perf: improve `Accept` parsing speed + - perf: improve `Accept-Charset` parsing speed + - perf: improve `Accept-Encoding` parsing speed + - perf: improve `Accept-Language` parsing speed + +1.3.2 / 2016-03-08 +================== + + * deps: mime-types@~2.1.10 + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + - deps: mime-db@~1.22.0 + +1.3.1 / 2016-01-19 +================== + + * deps: mime-types@~2.1.9 + - deps: mime-db@~1.21.0 + +1.3.0 / 2015-09-29 +================== + + * deps: mime-types@~2.1.7 + - deps: mime-db@~1.19.0 + * deps: negotiator@0.6.0 + - Fix including type extensions in parameters in `Accept` parsing + - Fix parsing `Accept` parameters with quoted equals + - Fix parsing `Accept` parameters with quoted semicolons + - Lazy-load modules from main entry point + - perf: delay type concatenation until needed + - perf: enable strict mode + - perf: hoist regular expressions + - perf: remove closures getting spec properties + - perf: remove a closure from media type parsing + - perf: remove property delete from media type parsing + +1.2.13 / 2015-09-06 +=================== + + * deps: mime-types@~2.1.6 + - deps: mime-db@~1.18.0 + +1.2.12 / 2015-07-30 +=================== + + * deps: mime-types@~2.1.4 + - deps: mime-db@~1.16.0 + +1.2.11 / 2015-07-16 +=================== + + * deps: mime-types@~2.1.3 + - deps: mime-db@~1.15.0 + +1.2.10 / 2015-07-01 +=================== + + * deps: mime-types@~2.1.2 + - deps: mime-db@~1.14.0 + +1.2.9 / 2015-06-08 +================== + + * deps: mime-types@~2.1.1 + - perf: fix deopt during mapping + +1.2.8 / 2015-06-07 +================== + + * deps: mime-types@~2.1.0 + - deps: mime-db@~1.13.0 + * perf: avoid argument reassignment & argument slice + * perf: avoid negotiator recursive construction + * perf: enable strict mode + * perf: remove unnecessary bitwise operator + +1.2.7 / 2015-05-10 +================== + + * deps: negotiator@0.5.3 + - Fix media type parameter matching to be case-insensitive + +1.2.6 / 2015-05-07 +================== + + * deps: mime-types@~2.0.11 + - deps: mime-db@~1.9.1 + * deps: negotiator@0.5.2 + - Fix comparing media types with quoted values + - Fix splitting media types with quoted commas + +1.2.5 / 2015-03-13 +================== + + * deps: mime-types@~2.0.10 + - deps: mime-db@~1.8.0 + +1.2.4 / 2015-02-14 +================== + + * Support Node.js 0.6 + * deps: mime-types@~2.0.9 
+ - deps: mime-db@~1.7.0 + * deps: negotiator@0.5.1 + - Fix preference sorting to be stable for long acceptable lists + +1.2.3 / 2015-01-31 +================== + + * deps: mime-types@~2.0.8 + - deps: mime-db@~1.6.0 + +1.2.2 / 2014-12-30 +================== + + * deps: mime-types@~2.0.7 + - deps: mime-db@~1.5.0 + +1.2.1 / 2014-12-30 +================== + + * deps: mime-types@~2.0.5 + - deps: mime-db@~1.3.1 + +1.2.0 / 2014-12-19 +================== + + * deps: negotiator@0.5.0 + - Fix list return order when large accepted list + - Fix missing identity encoding when q=0 exists + - Remove dynamic building of Negotiator class + +1.1.4 / 2014-12-10 +================== + + * deps: mime-types@~2.0.4 + - deps: mime-db@~1.3.0 + +1.1.3 / 2014-11-09 +================== + + * deps: mime-types@~2.0.3 + - deps: mime-db@~1.2.0 + +1.1.2 / 2014-10-14 +================== + + * deps: negotiator@0.4.9 + - Fix error when media type has invalid parameter + +1.1.1 / 2014-09-28 +================== + + * deps: mime-types@~2.0.2 + - deps: mime-db@~1.1.0 + * deps: negotiator@0.4.8 + - Fix all negotiations to be case-insensitive + - Stable sort preferences of same quality according to client order + +1.1.0 / 2014-09-02 +================== + + * update `mime-types` + +1.0.7 / 2014-07-04 +================== + + * Fix wrong type returned from `type` when match after unknown extension + +1.0.6 / 2014-06-24 +================== + + * deps: negotiator@0.4.7 + +1.0.5 / 2014-06-20 +================== + + * fix crash when unknown extension given + +1.0.4 / 2014-06-19 +================== + + * use `mime-types` + +1.0.3 / 2014-06-11 +================== + + * deps: negotiator@0.4.6 + - Order by specificity when quality is the same + +1.0.2 / 2014-05-29 +================== + + * Fix interpretation when header not in request + * deps: pin negotiator@0.4.5 + +1.0.1 / 2014-01-18 +================== + + * Identity encoding isn't always acceptable + * deps: negotiator@~0.4.0 + +1.0.0 / 2013-12-27 +================== + + * Genesis diff --git a/node_modules/accepts/LICENSE b/node_modules/accepts/LICENSE new file mode 100644 index 00000000..06166077 --- /dev/null +++ b/node_modules/accepts/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/accepts/README.md b/node_modules/accepts/README.md new file mode 100644 index 00000000..66a2f540 --- /dev/null +++ b/node_modules/accepts/README.md @@ -0,0 +1,142 @@ +# accepts + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator). +Extracted from [koa](https://www.npmjs.com/package/koa) for general use. + +In addition to negotiator, it allows: + +- Allows types as an array or arguments list, i.e. `(['text/html', 'application/json'])` + as well as `('text/html', 'application/json')`. +- Allows type shorthands such as `json`. +- Returns `false` when no types match +- Treats non-existent headers as `*` + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install accepts +``` + +## API + + + +```js +var accepts = require('accepts') +``` + +### accepts(req) + +Create a new `Accepts` object for the given `req`. + +#### .charset(charsets) + +Return the first accepted charset. If nothing in `charsets` is accepted, +then `false` is returned. + +#### .charsets() + +Return the charsets that the request accepts, in the order of the client's +preference (most preferred first). + +#### .encoding(encodings) + +Return the first accepted encoding. If nothing in `encodings` is accepted, +then `false` is returned. + +#### .encodings() + +Return the encodings that the request accepts, in the order of the client's +preference (most preferred first). + +#### .language(languages) + +Return the first accepted language. If nothing in `languages` is accepted, +then `false` is returned. + +#### .languages() + +Return the languages that the request accepts, in the order of the client's +preference (most preferred first). + +#### .type(types) + +Return the first accepted type (and it is returned as the same text as what +appears in the `types` array). If nothing in `types` is accepted, then `false` +is returned. + +The `types` array can contain full MIME types or file extensions. Any value +that is not a full MIME type is passed to `require('mime-types').lookup`. + +#### .types() + +Return the types that the request accepts, in the order of the client's +preference (most preferred first). + +## Examples + +### Simple type negotiation + +This simple example shows how to use `accepts` to return a differently typed +response body based on what the client wants to accept. The server lists its +preferences in order and will get back the best match between the client and +server.
+ +```js +var accepts = require('accepts') +var http = require('http') + +function app (req, res) { + var accept = accepts(req) + + // the order of this list is significant; should be server preferred order + switch (accept.type(['json', 'html'])) { + case 'json': + res.setHeader('Content-Type', 'application/json') + res.write('{"hello":"world!"}') + break + case 'html': + res.setHeader('Content-Type', 'text/html') + res.write('hello, world!') + break + default: + // the fallback is text/plain, so no need to specify it above + res.setHeader('Content-Type', 'text/plain') + res.write('hello, world!') + break + } + + res.end() +} + +http.createServer(app).listen(3000) +``` + +You can test this out with the cURL program: +```sh +curl -I -H'Accept: text/html' http://localhost:3000/ +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master +[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master +[node-version-image]: https://badgen.net/npm/node/accepts +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/accepts +[npm-url]: https://npmjs.org/package/accepts +[npm-version-image]: https://badgen.net/npm/v/accepts +[travis-image]: https://badgen.net/travis/jshttp/accepts/master +[travis-url]: https://travis-ci.org/jshttp/accepts diff --git a/node_modules/accepts/index.js b/node_modules/accepts/index.js new file mode 100644 index 00000000..e9b2f63f --- /dev/null +++ b/node_modules/accepts/index.js @@ -0,0 +1,238 @@ +/*! + * accepts + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var Negotiator = require('negotiator') +var mime = require('mime-types') + +/** + * Module exports. + * @public + */ + +module.exports = Accepts + +/** + * Create a new Accepts object for the given req. + * + * @param {object} req + * @public + */ + +function Accepts (req) { + if (!(this instanceof Accepts)) { + return new Accepts(req) + } + + this.headers = req.headers + this.negotiator = new Negotiator(req) +} + +/** + * Check if the given `type(s)` is acceptable, returning + * the best match when true, otherwise `undefined`, in which + * case you should respond with 406 "Not Acceptable". + * + * The `type` value may be a single mime type string + * such as "application/json", the extension name + * such as "json" or an array `["json", "html", "text/plain"]`. When a list + * or array is given the _best_ match, if any is returned. + * + * Examples: + * + * // Accept: text/html + * this.types('html'); + * // => "html" + * + * // Accept: text/*, application/json + * this.types('html'); + * // => "html" + * this.types('text/html'); + * // => "text/html" + * this.types('json', 'text'); + * // => "json" + * this.types('application/json'); + * // => "application/json" + * + * // Accept: text/*, application/json + * this.types('image/png'); + * this.types('png'); + * // => undefined + * + * // Accept: text/*;q=.5, application/json + * this.types(['html', 'json']); + * this.types('html', 'json'); + * // => "json" + * + * @param {String|Array} types... 
+ * @return {String|Array|Boolean} + * @public + */ + +Accepts.prototype.type = +Accepts.prototype.types = function (types_) { + var types = types_ + + // support flattened arguments + if (types && !Array.isArray(types)) { + types = new Array(arguments.length) + for (var i = 0; i < types.length; i++) { + types[i] = arguments[i] + } + } + + // no types, return all requested types + if (!types || types.length === 0) { + return this.negotiator.mediaTypes() + } + + // no accept header, return first given type + if (!this.headers.accept) { + return types[0] + } + + var mimes = types.map(extToMime) + var accepts = this.negotiator.mediaTypes(mimes.filter(validMime)) + var first = accepts[0] + + return first + ? types[mimes.indexOf(first)] + : false +} + +/** + * Return accepted encodings or best fit based on `encodings`. + * + * Given `Accept-Encoding: gzip, deflate` + * an array sorted by quality is returned: + * + * ['gzip', 'deflate'] + * + * @param {String|Array} encodings... + * @return {String|Array} + * @public + */ + +Accepts.prototype.encoding = +Accepts.prototype.encodings = function (encodings_) { + var encodings = encodings_ + + // support flattened arguments + if (encodings && !Array.isArray(encodings)) { + encodings = new Array(arguments.length) + for (var i = 0; i < encodings.length; i++) { + encodings[i] = arguments[i] + } + } + + // no encodings, return all requested encodings + if (!encodings || encodings.length === 0) { + return this.negotiator.encodings() + } + + return this.negotiator.encodings(encodings)[0] || false +} + +/** + * Return accepted charsets or best fit based on `charsets`. + * + * Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5` + * an array sorted by quality is returned: + * + * ['utf-8', 'utf-7', 'iso-8859-1'] + * + * @param {String|Array} charsets... + * @return {String|Array} + * @public + */ + +Accepts.prototype.charset = +Accepts.prototype.charsets = function (charsets_) { + var charsets = charsets_ + + // support flattened arguments + if (charsets && !Array.isArray(charsets)) { + charsets = new Array(arguments.length) + for (var i = 0; i < charsets.length; i++) { + charsets[i] = arguments[i] + } + } + + // no charsets, return all requested charsets + if (!charsets || charsets.length === 0) { + return this.negotiator.charsets() + } + + return this.negotiator.charsets(charsets)[0] || false +} + +/** + * Return accepted languages or best fit based on `langs`. + * + * Given `Accept-Language: en;q=0.8, es, pt` + * an array sorted by quality is returned: + * + * ['es', 'pt', 'en'] + * + * @param {String|Array} langs... + * @return {Array|String} + * @public + */ + +Accepts.prototype.lang = +Accepts.prototype.langs = +Accepts.prototype.language = +Accepts.prototype.languages = function (languages_) { + var languages = languages_ + + // support flattened arguments + if (languages && !Array.isArray(languages)) { + languages = new Array(arguments.length) + for (var i = 0; i < languages.length; i++) { + languages[i] = arguments[i] + } + } + + // no languages, return all requested languages + if (!languages || languages.length === 0) { + return this.negotiator.languages() + } + + return this.negotiator.languages(languages)[0] || false +} + +/** + * Convert extnames to mime. + * + * @param {String} type + * @return {String} + * @private + */ + +function extToMime (type) { + return type.indexOf('/') === -1 + ? mime.lookup(type) + : type +} + +/** + * Check if mime is valid. 
+ * + * @param {String} type + * @return {String} + * @private + */ + +function validMime (type) { + return typeof type === 'string' +} diff --git a/node_modules/accepts/package.json b/node_modules/accepts/package.json new file mode 100644 index 00000000..bc750cf8 --- /dev/null +++ b/node_modules/accepts/package.json @@ -0,0 +1,47 @@ +{ + "name": "accepts", + "description": "Higher-level content negotiation", + "version": "1.3.7", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "jshttp/accepts", + "dependencies": { + "mime-types": "~2.1.24", + "negotiator": "0.6.2" + }, + "devDependencies": { + "deep-equal": "1.0.1", + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.17.2", + "eslint-plugin-markdown": "1.0.0", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "mocha": "6.1.4", + "nyc": "14.0.0" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "test-travis": "nyc --reporter=text npm test" + }, + "keywords": [ + "content", + "negotiation", + "accept", + "accepts" + ] +} diff --git a/node_modules/ansi-align/CHANGELOG.md b/node_modules/ansi-align/CHANGELOG.md new file mode 100644 index 00000000..621e50a9 --- /dev/null +++ b/node_modules/ansi-align/CHANGELOG.md @@ -0,0 +1,36 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +# [2.0.0](https://github.com/nexdrew/ansi-align/compare/v1.1.0...v2.0.0) (2017-05-01) + + +### Features + +* ES2015ify, dropping support for Node <4 ([#30](https://github.com/nexdrew/ansi-align/issues/30)) ([7b43f48](https://github.com/nexdrew/ansi-align/commit/7b43f48)) + + +### BREAKING CHANGES + +* Node 0.10 or 0.12 no longer supported, please update to Node 4+ or use ansi-align@1.1.0 + + + + +# [1.1.0](https://github.com/nexdrew/ansi-align/compare/v1.0.0...v1.1.0) (2016-06-06) + + +### Features + +* support left-alignment as no-op ([#3](https://github.com/nexdrew/ansi-align/issues/3)) ([e581db6](https://github.com/nexdrew/ansi-align/commit/e581db6)) + + + + +# 1.0.0 (2016-04-30) + + +### Features + +* initial commit ([1914d90](https://github.com/nexdrew/ansi-align/commit/1914d90)) diff --git a/node_modules/ansi-align/LICENSE b/node_modules/ansi-align/LICENSE new file mode 100644 index 00000000..ab601b65 --- /dev/null +++ b/node_modules/ansi-align/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2016, Contributors + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. 
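Before the ansi-align files below, a minimal usage sketch of the accepts API vendored above may help tie its README and index.js together. The request object and header values are invented for illustration; a plain object is enough here because accepts (via negotiator) only reads `req.headers`.

```js
// Sketch: exercising accepts' shorthand and flattened-argument handling.
var accepts = require('accepts')

// Only `headers` is read, so a plain object stands in for a Node.js request.
var req = {
  headers: {
    accept: 'application/json, text/*;q=0.5',
    'accept-encoding': 'gzip, deflate'
  }
}

var accept = accepts(req)

accept.type('html', 'json')         // => 'json'  (shorthands resolved via mime-types)
accept.type(['png'])                // => false   (nothing in the list is acceptable)
accept.encodings('deflate', 'gzip') // => 'gzip'  (both q=1; the client listed gzip first)
```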
diff --git a/node_modules/ansi-align/README.md b/node_modules/ansi-align/README.md new file mode 100644 index 00000000..3aafc67f --- /dev/null +++ b/node_modules/ansi-align/README.md @@ -0,0 +1,79 @@ +# ansi-align + +> align-text with ANSI support for CLIs + +[![Build Status](https://travis-ci.org/nexdrew/ansi-align.svg?branch=master)](https://travis-ci.org/nexdrew/ansi-align) +[![Coverage Status](https://coveralls.io/repos/github/nexdrew/ansi-align/badge.svg?branch=master)](https://coveralls.io/github/nexdrew/ansi-align?branch=master) +[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) + +Easily center- or right- align a block of text, carefully ignoring ANSI escape codes. + +E.g. turn this: + +ansi text block no alignment :( + +Into this: + +ansi text block center aligned! + +## Install + +```sh +npm install --save ansi-align +``` + +```js +var ansiAlign = require('ansi-align') +``` + +## API + +### `ansiAlign(text, [opts])` + +Align the given text per the line with the greatest [`string-width`](https://github.com/sindresorhus/string-width), returning a new string (or array). + +#### Arguments + +- `text`: required, string or array + + The text to align. If a string is given, it will be split using either the `opts.split` value or `'\n'` by default. If an array is given, a different array of modified strings will be returned. + +- `opts`: optional, object + + Options to change behavior, see below. + +#### Options + +- `opts.align`: string, default `'center'` + + The alignment mode. Use `'center'` for center-alignment, `'right'` for right-alignment, or `'left'` for left-alignment. Note that the given `text` is assumed to be left-aligned already, so specifying `align: 'left'` just returns the `text` as is (no-op). + +- `opts.split`: string or RegExp, default `'\n'` + + The separator to use when splitting the text. Only used if text is given as a string. + +- `opts.pad`: string, default `' '` + + The value used to left-pad (prepend to) lines of lesser width. Will be repeated as necessary to adjust alignment to the line with the greatest width. + +### `ansiAlign.center(text)` + +Alias for `ansiAlign(text, { align: 'center' })`. + +### `ansiAlign.right(text)` + +Alias for `ansiAlign(text, { align: 'right' })`. + +### `ansiAlign.left(text)` + +Alias for `ansiAlign(text, { align: 'left' })`, which is a no-op. + +## Similar Packages + +- [`center-align`](https://github.com/jonschlinkert/center-align): Very close to this package, except it doesn't support ANSI codes. +- [`left-pad`](https://github.com/camwest/left-pad): Great for left-padding but does not support center alignment or ANSI codes. +- Pretty much anything by the [chalk](https://github.com/chalk) team + +## License + +ISC © Contributors diff --git a/node_modules/ansi-align/index.js b/node_modules/ansi-align/index.js new file mode 100644 index 00000000..67fa826d --- /dev/null +++ b/node_modules/ansi-align/index.js @@ -0,0 +1,61 @@ +'use strict' + +const stringWidth = require('string-width') + +function ansiAlign (text, opts) { + if (!text) return text + + opts = opts || {} + const align = opts.align || 'center' + + // short-circuit `align: 'left'` as no-op + if (align === 'left') return text + + const split = opts.split || '\n' + const pad = opts.pad || ' ' + const widthDiffFn = align !== 'right' ? 
halfDiff : fullDiff + + let returnString = false + if (!Array.isArray(text)) { + returnString = true + text = String(text).split(split) + } + + let width + let maxWidth = 0 + text = text.map(function (str) { + str = String(str) + width = stringWidth(str) + maxWidth = Math.max(width, maxWidth) + return { + str, + width + } + }).map(function (obj) { + return new Array(widthDiffFn(maxWidth, obj.width) + 1).join(pad) + obj.str + }) + + return returnString ? text.join(split) : text +} + +ansiAlign.left = function left (text) { + return ansiAlign(text, { align: 'left' }) +} + +ansiAlign.center = function center (text) { + return ansiAlign(text, { align: 'center' }) +} + +ansiAlign.right = function right (text) { + return ansiAlign(text, { align: 'right' }) +} + +module.exports = ansiAlign + +function halfDiff (maxWidth, curWidth) { + return Math.floor((maxWidth - curWidth) / 2) +} + +function fullDiff (maxWidth, curWidth) { + return maxWidth - curWidth +} diff --git a/node_modules/ansi-align/package.json b/node_modules/ansi-align/package.json new file mode 100644 index 00000000..ef8e4f8b --- /dev/null +++ b/node_modules/ansi-align/package.json @@ -0,0 +1,43 @@ +{ + "name": "ansi-align", + "version": "2.0.0", + "description": "align-text with ANSI support for CLIs", + "main": "index.js", + "scripts": { + "pretest": "standard", + "test": "nyc ava", + "coverage": "nyc report --reporter=text-lcov | coveralls", + "release": "standard-version" + }, + "files": [ + "index.js" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/nexdrew/ansi-align.git" + }, + "keywords": [ + "ansi", + "align", + "cli", + "center", + "pad" + ], + "author": "nexdrew", + "license": "ISC", + "bugs": { + "url": "https://github.com/nexdrew/ansi-align/issues" + }, + "homepage": "https://github.com/nexdrew/ansi-align#readme", + "dependencies": { + "string-width": "^2.0.0" + }, + "devDependencies": { + "ava": "^0.19.1", + "chalk": "^1.1.3", + "coveralls": "^2.13.1", + "nyc": "^10.3.0", + "standard": "^10.0.2", + "standard-version": "^4.0.0" + } +} diff --git a/node_modules/ansi-regex/index.js b/node_modules/ansi-regex/index.js new file mode 100644 index 00000000..b9574ed7 --- /dev/null +++ b/node_modules/ansi-regex/index.js @@ -0,0 +1,4 @@ +'use strict'; +module.exports = function () { + return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]/g; +}; diff --git a/node_modules/ansi-regex/license b/node_modules/ansi-regex/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/ansi-regex/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/ansi-regex/package.json b/node_modules/ansi-regex/package.json new file mode 100644 index 00000000..eb44fb5c --- /dev/null +++ b/node_modules/ansi-regex/package.json @@ -0,0 +1,64 @@ +{ + "name": "ansi-regex", + "version": "2.1.1", + "description": "Regular expression for matching ANSI escape codes", + "license": "MIT", + "repository": "chalk/ansi-regex", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "maintainers": [ + "Sindre Sorhus (sindresorhus.com)", + "Joshua Appelman (jbnicolai.com)", + "JD Ballard (github.com/qix-)" + ], + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava --verbose", + "view-supported": "node fixtures/view-codes.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "text", + "regex", + "regexp", + "re", + "match", + "test", + "find", + "pattern" + ], + "devDependencies": { + "ava": "0.17.0", + "xo": "0.16.0" + }, + "xo": { + "rules": { + "guard-for-in": 0, + "no-loop-func": 0 + } + } +} diff --git a/node_modules/ansi-regex/readme.md b/node_modules/ansi-regex/readme.md new file mode 100644 index 00000000..6a928edf --- /dev/null +++ b/node_modules/ansi-regex/readme.md @@ -0,0 +1,39 @@ +# ansi-regex [![Build Status](https://travis-ci.org/chalk/ansi-regex.svg?branch=master)](https://travis-ci.org/chalk/ansi-regex) + +> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) + + +## Install + +``` +$ npm install --save ansi-regex +``` + + +## Usage + +```js +const ansiRegex = require('ansi-regex'); + +ansiRegex().test('\u001b[4mcake\u001b[0m'); +//=> true + +ansiRegex().test('cake'); +//=> false + +'\u001b[4mcake\u001b[0m'.match(ansiRegex()); +//=> ['\u001b[4m', '\u001b[0m'] +``` + +## FAQ + +### Why do you test for codes not in the ECMA 48 standard? + +Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. If I recall correctly, we test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them. + +On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out. 
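Beyond the `.test()` and `.match()` calls shown in the usage snippet above, a common reason to reach for this regex is stripping escape sequences from a string before measuring or logging it. A small illustrative sketch, not part of the vendored readme, relying only on the fact that the exported regex carries the global flag:

```js
const ansiRegex = require('ansi-regex');

// The regex is global, so a single String#replace removes every escape code.
const colored = '\u001b[32mok\u001b[0m 12 tests passed';
const plain = colored.replace(ansiRegex(), '');
//=> 'ok 12 tests passed'
```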
+ + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/ansi-styles/index.js b/node_modules/ansi-styles/index.js new file mode 100644 index 00000000..90a871c4 --- /dev/null +++ b/node_modules/ansi-styles/index.js @@ -0,0 +1,165 @@ +'use strict'; +const colorConvert = require('color-convert'); + +const wrapAnsi16 = (fn, offset) => function () { + const code = fn.apply(colorConvert, arguments); + return `\u001B[${code + offset}m`; +}; + +const wrapAnsi256 = (fn, offset) => function () { + const code = fn.apply(colorConvert, arguments); + return `\u001B[${38 + offset};5;${code}m`; +}; + +const wrapAnsi16m = (fn, offset) => function () { + const rgb = fn.apply(colorConvert, arguments); + return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`; +}; + +function assembleStyles() { + const codes = new Map(); + const styles = { + modifier: { + reset: [0, 0], + // 21 isn't widely supported and 22 does the same thing + bold: [1, 22], + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29] + }, + color: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + gray: [90, 39], + + // Bright color + redBright: [91, 39], + greenBright: [92, 39], + yellowBright: [93, 39], + blueBright: [94, 39], + magentaBright: [95, 39], + cyanBright: [96, 39], + whiteBright: [97, 39] + }, + bgColor: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49], + + // Bright color + bgBlackBright: [100, 49], + bgRedBright: [101, 49], + bgGreenBright: [102, 49], + bgYellowBright: [103, 49], + bgBlueBright: [104, 49], + bgMagentaBright: [105, 49], + bgCyanBright: [106, 49], + bgWhiteBright: [107, 49] + } + }; + + // Fix humans + styles.color.grey = styles.color.gray; + + for (const groupName of Object.keys(styles)) { + const group = styles[groupName]; + + for (const styleName of Object.keys(group)) { + const style = group[styleName]; + + styles[styleName] = { + open: `\u001B[${style[0]}m`, + close: `\u001B[${style[1]}m` + }; + + group[styleName] = styles[styleName]; + + codes.set(style[0], style[1]); + } + + Object.defineProperty(styles, groupName, { + value: group, + enumerable: false + }); + + Object.defineProperty(styles, 'codes', { + value: codes, + enumerable: false + }); + } + + const ansi2ansi = n => n; + const rgb2rgb = (r, g, b) => [r, g, b]; + + styles.color.close = '\u001B[39m'; + styles.bgColor.close = '\u001B[49m'; + + styles.color.ansi = { + ansi: wrapAnsi16(ansi2ansi, 0) + }; + styles.color.ansi256 = { + ansi256: wrapAnsi256(ansi2ansi, 0) + }; + styles.color.ansi16m = { + rgb: wrapAnsi16m(rgb2rgb, 0) + }; + + styles.bgColor.ansi = { + ansi: wrapAnsi16(ansi2ansi, 10) + }; + styles.bgColor.ansi256 = { + ansi256: wrapAnsi256(ansi2ansi, 10) + }; + styles.bgColor.ansi16m = { + rgb: wrapAnsi16m(rgb2rgb, 10) + }; + + for (let key of Object.keys(colorConvert)) { + if (typeof colorConvert[key] !== 'object') { + continue; + } + + const suite = colorConvert[key]; + + if (key === 'ansi16') { + key = 'ansi'; + } + + if ('ansi16' in suite) { + styles.color.ansi[key] = wrapAnsi16(suite.ansi16, 0); + styles.bgColor.ansi[key] = wrapAnsi16(suite.ansi16, 10); + } + + if ('ansi256' in suite) { + styles.color.ansi256[key] = wrapAnsi256(suite.ansi256, 0); + styles.bgColor.ansi256[key] = wrapAnsi256(suite.ansi256, 10); + } 
+ + if ('rgb' in suite) { + styles.color.ansi16m[key] = wrapAnsi16m(suite.rgb, 0); + styles.bgColor.ansi16m[key] = wrapAnsi16m(suite.rgb, 10); + } + } + + return styles; +} + +// Make the export immutable +Object.defineProperty(module, 'exports', { + enumerable: true, + get: assembleStyles +}); diff --git a/node_modules/ansi-styles/license b/node_modules/ansi-styles/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/ansi-styles/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/ansi-styles/package.json b/node_modules/ansi-styles/package.json new file mode 100644 index 00000000..65edb48c --- /dev/null +++ b/node_modules/ansi-styles/package.json @@ -0,0 +1,56 @@ +{ + "name": "ansi-styles", + "version": "3.2.1", + "description": "ANSI escape codes for styling strings in the terminal", + "license": "MIT", + "repository": "chalk/ansi-styles", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava", + "screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor" + }, + "files": [ + "index.js" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "color-convert": "^1.9.0" + }, + "devDependencies": { + "ava": "*", + "babel-polyfill": "^6.23.0", + "svg-term-cli": "^2.1.1", + "xo": "*" + }, + "ava": { + "require": "babel-polyfill" + } +} diff --git a/node_modules/ansi-styles/readme.md b/node_modules/ansi-styles/readme.md new file mode 100644 index 00000000..3158e2df --- /dev/null +++ b/node_modules/ansi-styles/readme.md @@ -0,0 +1,147 @@ +# ansi-styles [![Build Status](https://travis-ci.org/chalk/ansi-styles.svg?branch=master)](https://travis-ci.org/chalk/ansi-styles) + +> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal + +You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings. 
+ + + + +## Install + +``` +$ npm install ansi-styles +``` + + +## Usage + +```js +const style = require('ansi-styles'); + +console.log(`${style.green.open}Hello world!${style.green.close}`); + + +// Color conversion between 16/256/truecolor +// NOTE: If conversion goes to 16 colors or 256 colors, the original color +// may be degraded to fit that color palette. This means terminals +// that do not support 16 million colors will best-match the +// original color. +console.log(style.bgColor.ansi.hsl(120, 80, 72) + 'Hello world!' + style.bgColor.close); +console.log(style.color.ansi256.rgb(199, 20, 250) + 'Hello world!' + style.color.close); +console.log(style.color.ansi16m.hex('#ABCDEF') + 'Hello world!' + style.color.close); +``` + +## API + +Each style has an `open` and `close` property. + + +## Styles + +### Modifiers + +- `reset` +- `bold` +- `dim` +- `italic` *(Not widely supported)* +- `underline` +- `inverse` +- `hidden` +- `strikethrough` *(Not widely supported)* + +### Colors + +- `black` +- `red` +- `green` +- `yellow` +- `blue` +- `magenta` +- `cyan` +- `white` +- `gray` ("bright black") +- `redBright` +- `greenBright` +- `yellowBright` +- `blueBright` +- `magentaBright` +- `cyanBright` +- `whiteBright` + +### Background colors + +- `bgBlack` +- `bgRed` +- `bgGreen` +- `bgYellow` +- `bgBlue` +- `bgMagenta` +- `bgCyan` +- `bgWhite` +- `bgBlackBright` +- `bgRedBright` +- `bgGreenBright` +- `bgYellowBright` +- `bgBlueBright` +- `bgMagentaBright` +- `bgCyanBright` +- `bgWhiteBright` + + +## Advanced usage + +By default, you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module. + +- `style.modifier` +- `style.color` +- `style.bgColor` + +###### Example + +```js +console.log(style.color.green.open); +``` + +Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `style.codes`, which returns a `Map` with the open codes as keys and close codes as values. + +###### Example + +```js +console.log(style.codes.get(36)); +//=> 39 +``` + + +## [256 / 16 million (TrueColor) support](https://gist.github.com/XVilka/8346728) + +`ansi-styles` uses the [`color-convert`](https://github.com/Qix-/color-convert) package to allow for converting between various colors and ANSI escapes, with support for 256 and 16 million colors. 
+ +To use these, call the associated conversion function with the intended output, for example: + +```js +style.color.ansi.rgb(100, 200, 15); // RGB to 16 color ansi foreground code +style.bgColor.ansi.rgb(100, 200, 15); // RGB to 16 color ansi background code + +style.color.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code +style.bgColor.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi background code + +style.color.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color foreground code +style.bgColor.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color background code +``` + + +## Related + +- [ansi-escapes](https://github.com/sindresorhus/ansi-escapes) - ANSI escape codes for manipulating the terminal + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/node_modules/any-promise/.jshintrc b/node_modules/any-promise/.jshintrc new file mode 100644 index 00000000..979105e9 --- /dev/null +++ b/node_modules/any-promise/.jshintrc @@ -0,0 +1,4 @@ +{ + "node":true, + "strict":true +} diff --git a/node_modules/any-promise/.npmignore b/node_modules/any-promise/.npmignore new file mode 100644 index 00000000..1354abc0 --- /dev/null +++ b/node_modules/any-promise/.npmignore @@ -0,0 +1,7 @@ +.git* +test/ +test-browser/ +build/ +.travis.yml +*.swp +Makefile diff --git a/node_modules/any-promise/LICENSE b/node_modules/any-promise/LICENSE new file mode 100644 index 00000000..9187fe5d --- /dev/null +++ b/node_modules/any-promise/LICENSE @@ -0,0 +1,19 @@ +Copyright (C) 2014-2016 Kevin Beaty + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/any-promise/README.md b/node_modules/any-promise/README.md new file mode 100644 index 00000000..174bea4a --- /dev/null +++ b/node_modules/any-promise/README.md @@ -0,0 +1,161 @@ +## Any Promise + +[![Build Status](https://secure.travis-ci.org/kevinbeaty/any-promise.svg)](http://travis-ci.org/kevinbeaty/any-promise) + +Let your library support any ES 2015 (ES6) compatible `Promise` and leave the choice to application authors. The application can *optionally* register its preferred `Promise` implementation and it will be exported when requiring `any-promise` from library code. + +If no preference is registered, defaults to the global `Promise` for newer Node.js versions. The browser version defaults to the window `Promise`, so polyfill or register as necessary.
+ +### Usage with global Promise: + +Assuming the global `Promise` is the desired implementation: + +```bash +# Install any libraries depending on any-promise +$ npm install mz +``` + +The installed libraries will use global Promise by default. + +```js +// in library +var Promise = require('any-promise') // the global Promise + +function promiseReturningFunction(){ + return new Promise(function(resolve, reject){...}) +} +``` + +### Usage with registration: + +Assuming `bluebird` is the desired Promise implementation: + +```bash +# Install preferred promise library +$ npm install bluebird +# Install any-promise to allow registration +$ npm install any-promise +# Install any libraries you would like to use depending on any-promise +$ npm install mz +``` + +Register your preference in the application entry point before any other `require` of packages that load `any-promise`: + +```javascript +// top of application index.js or other entry point +require('any-promise/register/bluebird') + +// -or- Equivalent to above, but allows customization of Promise library +require('any-promise/register')('bluebird', {Promise: require('bluebird')}) +``` + +Now that the implementation is registered, you can use any package depending on `any-promise`: + + +```javascript +var fsp = require('mz/fs') // mz/fs will use registered bluebird promises +var Promise = require('any-promise') // the registered bluebird promise +``` + +It is safe to call `register` multiple times, but it must always be with the same implementation. + +Again, registration is *optional*. It should only be called by the application user if overriding the global `Promise` implementation is desired. + +### Optional Application Registration + +As an application author, you can *optionally* register a preferred `Promise` implementation on application startup (before any call to `require('any-promise')`: + +You must register your preference before any call to `require('any-promise')` (by you or required packages), and only one implementation can be registered. Typically, this registration would occur at the top of the application entry point. + + +#### Registration shortcuts + +If you are using a known `Promise` implementation, you can register your preference with a shortcut: + + +```js +require('any-promise/register/bluebird') +// -or- +import 'any-promise/register/q'; +``` + +Shortcut registration is the preferred registration method as it works in the browser and Node.js. It is also convenient for using with `import` and many test runners, that offer a `--require` flag: + +``` +$ ava --require=any-promise/register/bluebird test.js +``` + +Current known implementations include `bluebird`, `q`, `when`, `rsvp`, `es6-promise`, `promise`, `native-promise-only`, `pinkie`, `vow` and `lie`. If you are not using a known implementation, you can use another registration method described below. + + +#### Basic Registration + +As an alternative to registration shortcuts, you can call the `register` function with the preferred `Promise` implementation. The benefit of this approach is that a `Promise` library can be required by name without being a known implementation. This approach does NOT work in the browser. To use `any-promise` in the browser use either registration shortcuts or specify the `Promise` constructor using advanced registration (see below). 
+ +```javascript +require('any-promise/register')('when') +// -or- require('any-promise/register')('any other ES6 compatible library (known or otherwise)') +``` + +This registration method will try to detect the `Promise` constructor from requiring the specified implementation. If you would like to specify your own constructor, see advanced registration. + + +#### Advanced Registration + +To use the browser version, you should either install a polyfill or explicitly register the `Promise` constructor: + +```javascript +require('any-promise/register')('bluebird', {Promise: require('bluebird')}) +``` + +This could also be used for registering a custom `Promise` implementation or subclass. + +Your preference will be registered globally, allowing a single registration even if multiple versions of `any-promise` are installed in the NPM dependency tree or multiple bundled JavaScript files are used in the browser. You can bypass this global registration in options: + + +```javascript +require('../register')('es6-promise', {Promise: require('es6-promise').Promise, global: false}) +``` + +### Library Usage + +To use any `Promise` constructor, simply require it: + +```javascript +var Promise = require('any-promise'); + +return Promise + .all([xf, f, init, coll]) + .then(fn); + + +return new Promise(function(resolve, reject){ + try { + resolve(item); + } catch(e){ + reject(e); + } +}); + +``` + +Except as noted below, libraries using `any-promise` should only use [documented](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) functions as there is no guarantee which implementation will be chosen by the application author. Libraries should never call `register`, only the application user should call it if desired. + + +#### Advanced Library Usage + +If your library needs to branch code based on the registered implementation, you can retrieve it using `var impl = require('any-promise/implementation')`, where `impl` will be the package name (`"bluebird"`, `"when"`, etc.) if registered, `"global.Promise"` if using the global version on Node.js, or `"window.Promise"` if using the browser version. You should always include a default case, as there is no guarantee what package may be registered. + + +### Support for old Node.js versions + +Node.js versions prior to `v0.12` may have contained buggy versions of the global `Promise`. For this reason, the global `Promise` is not loaded automatically for these old versions. If using `any-promise` in Node.js versions prior to `v0.12`, the user should register a desired implementation. + +If an implementation is not registered, `any-promise` will attempt to discover an installed `Promise` implementation. If no implementation can be found, an error will be thrown on `require('any-promise')`. While the auto-discovery usually avoids errors, it is non-deterministic. It is recommended that the user always register a preferred implementation for older Node.js versions. + +This auto-discovery is only available for Node.js versions prior to `v0.12`. Any newer versions will always default to the global `Promise` implementation. + +### Related + +- [any-observable](https://github.com/sindresorhus/any-observable) - `any-promise` for Observables.
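The "Advanced Library Usage" section above describes branching on `require('any-promise/implementation')` but stops short of an example. A minimal sketch of that pattern — the `delay` helper is hypothetical, and the `bluebird` branch assumes Bluebird's `Promise.delay` is available — could look like:

```js
var impl = require('any-promise/implementation') // e.g. "bluebird", "global.Promise"
var Promise = require('any-promise')

function delay (ms) {
  switch (impl) {
    case 'bluebird':
      // Use a Bluebird-specific helper when that implementation is registered.
      return Promise.delay(ms)
    default:
      // Always keep a default case: any ES2015-compatible Promise may be registered.
      return new Promise(function (resolve) { setTimeout(resolve, ms) })
  }
}
```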
+ diff --git a/node_modules/any-promise/implementation.d.ts b/node_modules/any-promise/implementation.d.ts new file mode 100644 index 00000000..c331a56a --- /dev/null +++ b/node_modules/any-promise/implementation.d.ts @@ -0,0 +1,3 @@ +declare var implementation: string; + +export = implementation; diff --git a/node_modules/any-promise/implementation.js b/node_modules/any-promise/implementation.js new file mode 100644 index 00000000..a45ae94d --- /dev/null +++ b/node_modules/any-promise/implementation.js @@ -0,0 +1 @@ +module.exports = require('./register')().implementation diff --git a/node_modules/any-promise/index.d.ts b/node_modules/any-promise/index.d.ts new file mode 100644 index 00000000..9f646c5d --- /dev/null +++ b/node_modules/any-promise/index.d.ts @@ -0,0 +1,73 @@ +declare class Promise implements Promise.Thenable { + /** + * If you call resolve in the body of the callback passed to the constructor, + * your promise is fulfilled with result object passed to resolve. + * If you call reject your promise is rejected with the object passed to resolve. + * For consistency and debugging (eg stack traces), obj should be an instanceof Error. + * Any errors thrown in the constructor callback will be implicitly passed to reject(). + */ + constructor (callback: (resolve : (value?: R | Promise.Thenable) => void, reject: (error?: any) => void) => void); + + /** + * onFulfilled is called when/if "promise" resolves. onRejected is called when/if "promise" rejects. + * Both are optional, if either/both are omitted the next onFulfilled/onRejected in the chain is called. + * Both callbacks have a single parameter , the fulfillment value or rejection reason. + * "then" returns a new promise equivalent to the value you return from onFulfilled/onRejected after being passed through Promise.resolve. + * If an error is thrown in the callback, the returned promise rejects with that error. + * + * @param onFulfilled called when/if "promise" resolves + * @param onRejected called when/if "promise" rejects + */ + then (onFulfilled?: (value: R) => U | Promise.Thenable, onRejected?: (error: any) => U | Promise.Thenable): Promise; + then (onFulfilled?: (value: R) => U | Promise.Thenable, onRejected?: (error: any) => void): Promise; + + /** + * Sugar for promise.then(undefined, onRejected) + * + * @param onRejected called when/if "promise" rejects + */ + catch (onRejected?: (error: any) => U | Promise.Thenable): Promise; + + /** + * Make a new promise from the thenable. + * A thenable is promise-like in as far as it has a "then" method. + */ + static resolve (): Promise; + static resolve (value: R | Promise.Thenable): Promise; + + /** + * Make a promise that rejects to obj. For consistency and debugging (eg stack traces), obj should be an instanceof Error + */ + static reject (error: any): Promise; + + /** + * Make a promise that fulfills when every item in the array fulfills, and rejects if (and when) any item rejects. + * the array passed to all can be a mixture of promise-like objects and other objects. + * The fulfillment value is an array (in order) of fulfillment values. The rejection value is the first rejection value. 
+ */ + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable, T6 | Promise.Thenable, T7 | Promise.Thenable, T8 | Promise.Thenable, T9 | Promise.Thenable, T10 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable, T6 | Promise.Thenable, T7 | Promise.Thenable, T8 | Promise.Thenable, T9 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable, T6 | Promise.Thenable, T7 | Promise.Thenable, T8 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable, T6 | Promise.Thenable, T7 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5, T6, T7]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable, T6 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5, T6]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable ]): Promise<[T1, T2, T3, T4]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable]): Promise<[T1, T2, T3]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable]): Promise<[T1, T2]>; + static all (values: [T1 | Promise.Thenable]): Promise<[T1]>; + static all (values: Array>): Promise; + + /** + * Make a Promise that fulfills when any item fulfills, and rejects if any item rejects. + */ + static race (promises: (R | Promise.Thenable)[]): Promise; +} + +declare namespace Promise { + export interface Thenable { + then (onFulfilled?: (value: R) => U | Thenable, onRejected?: (error: any) => U | Thenable): Thenable; + then (onFulfilled?: (value: R) => U | Thenable, onRejected?: (error: any) => void): Thenable; + } +} + +export = Promise; diff --git a/node_modules/any-promise/index.js b/node_modules/any-promise/index.js new file mode 100644 index 00000000..74b85483 --- /dev/null +++ b/node_modules/any-promise/index.js @@ -0,0 +1 @@ +module.exports = require('./register')().Promise diff --git a/node_modules/any-promise/loader.js b/node_modules/any-promise/loader.js new file mode 100644 index 00000000..e1649142 --- /dev/null +++ b/node_modules/any-promise/loader.js @@ -0,0 +1,78 @@ +"use strict" + // global key for user preferred registration +var REGISTRATION_KEY = '@@any-promise/REGISTRATION', + // Prior registration (preferred or detected) + registered = null + +/** + * Registers the given implementation. An implementation must + * be registered prior to any call to `require("any-promise")`, + * typically on application load. + * + * If called with no arguments, will return registration in + * following priority: + * + * For Node.js: + * + * 1. Previous registration + * 2. global.Promise if node.js version >= 0.12 + * 3. Auto detected promise based on first sucessful require of + * known promise libraries. Note this is a last resort, as the + * loaded library is non-deterministic. 
node.js >= 0.12 will + * always use global.Promise over this priority list. + * 4. Throws error. + * + * For Browser: + * + * 1. Previous registration + * 2. window.Promise + * 3. Throws error. + * + * Options: + * + * Promise: Desired Promise constructor + * global: Boolean - Should the registration be cached in a global variable to + * allow cross dependency/bundle registration? (default true) + */ +module.exports = function(root, loadImplementation){ + return function register(implementation, opts){ + implementation = implementation || null + opts = opts || {} + // global registration unless explicitly {global: false} in options (default true) + var registerGlobal = opts.global !== false; + + // load any previous global registration + if(registered === null && registerGlobal){ + registered = root[REGISTRATION_KEY] || null + } + + if(registered !== null + && implementation !== null + && registered.implementation !== implementation){ + // Throw error if attempting to redefine implementation + throw new Error('any-promise already defined as "'+registered.implementation+ + '". You can only register an implementation before the first '+ + ' call to require("any-promise") and an implementation cannot be changed') + } + + if(registered === null){ + // use provided implementation + if(implementation !== null && typeof opts.Promise !== 'undefined'){ + registered = { + Promise: opts.Promise, + implementation: implementation + } + } else { + // require implementation if implementation is specified but not provided + registered = loadImplementation(implementation) + } + + if(registerGlobal){ + // register preference globally in case multiple installations + root[REGISTRATION_KEY] = registered + } + } + + return registered + } +} diff --git a/node_modules/any-promise/optional.js b/node_modules/any-promise/optional.js new file mode 100644 index 00000000..f3889420 --- /dev/null +++ b/node_modules/any-promise/optional.js @@ -0,0 +1,6 @@ +"use strict"; +try { + module.exports = require('./register')().Promise || null +} catch(e) { + module.exports = null +} diff --git a/node_modules/any-promise/package.json b/node_modules/any-promise/package.json new file mode 100644 index 00000000..5baf14cf --- /dev/null +++ b/node_modules/any-promise/package.json @@ -0,0 +1,45 @@ +{ + "name": "any-promise", + "version": "1.3.0", + "description": "Resolve any installed ES6 compatible promise", + "main": "index.js", + "typings": "index.d.ts", + "browser": { + "./register.js": "./register-shim.js" + }, + "scripts": { + "test": "ava" + }, + "repository": { + "type": "git", + "url": "https://github.com/kevinbeaty/any-promise" + }, + "keywords": [ + "promise", + "es6" + ], + "author": "Kevin Beaty", + "license": "MIT", + "bugs": { + "url": "https://github.com/kevinbeaty/any-promise/issues" + }, + "homepage": "http://github.com/kevinbeaty/any-promise", + "dependencies": {}, + "devDependencies": { + "ava": "^0.14.0", + "bluebird": "^3.0.0", + "es6-promise": "^3.0.0", + "is-promise": "^2.0.0", + "lie": "^3.0.0", + "mocha": "^2.0.0", + "native-promise-only": "^0.8.0", + "phantomjs-prebuilt": "^2.0.0", + "pinkie": "^2.0.0", + "promise": "^7.0.0", + "q": "^1.0.0", + "rsvp": "^3.0.0", + "vow": "^0.4.0", + "when": "^3.0.0", + "zuul": "^3.0.0" + } +} diff --git a/node_modules/any-promise/register-shim.js b/node_modules/any-promise/register-shim.js new file mode 100644 index 00000000..9049405c --- /dev/null +++ b/node_modules/any-promise/register-shim.js @@ -0,0 +1,18 @@ +"use strict"; +module.exports = require('./loader')(window, 
loadImplementation) + +/** + * Browser specific loadImplementation. Always uses `window.Promise` + * + * To register a custom implementation, must register with `Promise` option. + */ +function loadImplementation(){ + if(typeof window.Promise === 'undefined'){ + throw new Error("any-promise browser requires a polyfill or explicit registration"+ + " e.g: require('any-promise/register/bluebird')") + } + return { + Promise: window.Promise, + implementation: 'window.Promise' + } +} diff --git a/node_modules/any-promise/register.d.ts b/node_modules/any-promise/register.d.ts new file mode 100644 index 00000000..97f2fc05 --- /dev/null +++ b/node_modules/any-promise/register.d.ts @@ -0,0 +1,17 @@ +import Promise = require('./index'); + +declare function register (module?: string, options?: register.Options): register.Register; + +declare namespace register { + export interface Register { + Promise: typeof Promise; + implementation: string; + } + + export interface Options { + Promise?: typeof Promise; + global?: boolean + } +} + +export = register; diff --git a/node_modules/any-promise/register.js b/node_modules/any-promise/register.js new file mode 100644 index 00000000..255c6e2f --- /dev/null +++ b/node_modules/any-promise/register.js @@ -0,0 +1,94 @@ +"use strict" +module.exports = require('./loader')(global, loadImplementation); + +/** + * Node.js version of loadImplementation. + * + * Requires the given implementation and returns the registration + * containing {Promise, implementation} + * + * If implementation is undefined or global.Promise, loads it + * Otherwise uses require + */ +function loadImplementation(implementation){ + var impl = null + + if(shouldPreferGlobalPromise(implementation)){ + // if no implementation or env specified use global.Promise + impl = { + Promise: global.Promise, + implementation: 'global.Promise' + } + } else if(implementation){ + // if implementation specified, require it + var lib = require(implementation) + impl = { + Promise: lib.Promise || lib, + implementation: implementation + } + } else { + // try to auto detect implementation. This is non-deterministic + // and should prefer other branches, but this is our last chance + // to load something without throwing error + impl = tryAutoDetect() + } + + if(impl === null){ + throw new Error('Cannot find any-promise implementation nor'+ + ' global.Promise. You must install polyfill or call'+ + ' require("any-promise/register") with your preferred'+ + ' implementation, e.g. require("any-promise/register/bluebird")'+ + ' on application load prior to any require("any-promise").') + } + + return impl +} + +/** + * Determines if the global.Promise should be preferred if an implementation + * has not been registered. + */ +function shouldPreferGlobalPromise(implementation){ + if(implementation){ + return implementation === 'global.Promise' + } else if(typeof global.Promise !== 'undefined'){ + // Load global promise if implementation not specified + // Versions < 0.11 did not have global Promise + // Do not use for version < 0.12 as version 0.11 contained buggy versions + var version = (/v(\d+)\.(\d+)\.(\d+)/).exec(process.version) + return !(version && +version[1] == 0 && +version[2] < 12) + } + + // do not have global.Promise or another implementation was specified + return false +} + +/** + * Look for common libs as last resort there is no guarantee that + * this will return a desired implementation or even be deterministic. + * The priority is also nearly arbitrary. 
We are only doing this + * for older versions of Node.js <0.12 that do not have a reasonable + * global.Promise implementation and we the user has not registered + * the preference. This preserves the behavior of any-promise <= 0.1 + * and may be deprecated or removed in the future + */ +function tryAutoDetect(){ + var libs = [ + "es6-promise", + "promise", + "native-promise-only", + "bluebird", + "rsvp", + "when", + "q", + "pinkie", + "lie", + "vow"] + var i = 0, len = libs.length + for(; i < len; i++){ + try { + return loadImplementation(libs[i]) + } catch(e){} + } + return null +} diff --git a/node_modules/any-promise/register/bluebird.d.ts b/node_modules/any-promise/register/bluebird.d.ts new file mode 100644 index 00000000..336ce12b --- /dev/null +++ b/node_modules/any-promise/register/bluebird.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/bluebird.js b/node_modules/any-promise/register/bluebird.js new file mode 100644 index 00000000..de0f87eb --- /dev/null +++ b/node_modules/any-promise/register/bluebird.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('bluebird', {Promise: require('bluebird')}) diff --git a/node_modules/any-promise/register/es6-promise.d.ts b/node_modules/any-promise/register/es6-promise.d.ts new file mode 100644 index 00000000..336ce12b --- /dev/null +++ b/node_modules/any-promise/register/es6-promise.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/es6-promise.js b/node_modules/any-promise/register/es6-promise.js new file mode 100644 index 00000000..59bd55b7 --- /dev/null +++ b/node_modules/any-promise/register/es6-promise.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('es6-promise', {Promise: require('es6-promise').Promise}) diff --git a/node_modules/any-promise/register/lie.d.ts b/node_modules/any-promise/register/lie.d.ts new file mode 100644 index 00000000..336ce12b --- /dev/null +++ b/node_modules/any-promise/register/lie.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/lie.js b/node_modules/any-promise/register/lie.js new file mode 100644 index 00000000..7d305ca4 --- /dev/null +++ b/node_modules/any-promise/register/lie.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('lie', {Promise: require('lie')}) diff --git a/node_modules/any-promise/register/native-promise-only.d.ts b/node_modules/any-promise/register/native-promise-only.d.ts new file mode 100644 index 00000000..336ce12b --- /dev/null +++ b/node_modules/any-promise/register/native-promise-only.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/native-promise-only.js b/node_modules/any-promise/register/native-promise-only.js new file mode 100644 index 00000000..70a5a5e1 --- /dev/null +++ b/node_modules/any-promise/register/native-promise-only.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('native-promise-only', {Promise: require('native-promise-only')}) diff --git a/node_modules/any-promise/register/pinkie.d.ts b/node_modules/any-promise/register/pinkie.d.ts new file mode 100644 index 00000000..336ce12b --- /dev/null +++ b/node_modules/any-promise/register/pinkie.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/pinkie.js b/node_modules/any-promise/register/pinkie.js new file mode 100644 index 00000000..caaf98a5 --- /dev/null +++ b/node_modules/any-promise/register/pinkie.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('pinkie', {Promise: require('pinkie')}) diff --git a/node_modules/any-promise/register/promise.d.ts 
b/node_modules/any-promise/register/promise.d.ts new file mode 100644 index 00000000..336ce12b --- /dev/null +++ b/node_modules/any-promise/register/promise.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/promise.js b/node_modules/any-promise/register/promise.js new file mode 100644 index 00000000..746620d4 --- /dev/null +++ b/node_modules/any-promise/register/promise.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('promise', {Promise: require('promise')}) diff --git a/node_modules/any-promise/register/q.d.ts b/node_modules/any-promise/register/q.d.ts new file mode 100644 index 00000000..336ce12b --- /dev/null +++ b/node_modules/any-promise/register/q.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/q.js b/node_modules/any-promise/register/q.js new file mode 100644 index 00000000..0fc633a9 --- /dev/null +++ b/node_modules/any-promise/register/q.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('q', {Promise: require('q').Promise}) diff --git a/node_modules/any-promise/register/rsvp.d.ts b/node_modules/any-promise/register/rsvp.d.ts new file mode 100644 index 00000000..336ce12b --- /dev/null +++ b/node_modules/any-promise/register/rsvp.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/rsvp.js b/node_modules/any-promise/register/rsvp.js new file mode 100644 index 00000000..02b13180 --- /dev/null +++ b/node_modules/any-promise/register/rsvp.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('rsvp', {Promise: require('rsvp').Promise}) diff --git a/node_modules/any-promise/register/vow.d.ts b/node_modules/any-promise/register/vow.d.ts new file mode 100644 index 00000000..336ce12b --- /dev/null +++ b/node_modules/any-promise/register/vow.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/vow.js b/node_modules/any-promise/register/vow.js new file mode 100644 index 00000000..5b6868c4 --- /dev/null +++ b/node_modules/any-promise/register/vow.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('vow', {Promise: require('vow').Promise}) diff --git a/node_modules/any-promise/register/when.d.ts b/node_modules/any-promise/register/when.d.ts new file mode 100644 index 00000000..336ce12b --- /dev/null +++ b/node_modules/any-promise/register/when.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/when.js b/node_modules/any-promise/register/when.js new file mode 100644 index 00000000..d91c13d3 --- /dev/null +++ b/node_modules/any-promise/register/when.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('when', {Promise: require('when').Promise}) diff --git a/node_modules/anymatch/LICENSE b/node_modules/anymatch/LICENSE new file mode 100644 index 00000000..bc424705 --- /dev/null +++ b/node_modules/anymatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2014 Elan Shanker + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/anymatch/README.md b/node_modules/anymatch/README.md new file mode 100644 index 00000000..f674f407 --- /dev/null +++ b/node_modules/anymatch/README.md @@ -0,0 +1,99 @@ +anymatch [![Build Status](https://travis-ci.org/micromatch/anymatch.svg?branch=master)](https://travis-ci.org/micromatch/anymatch) [![Coverage Status](https://img.shields.io/coveralls/micromatch/anymatch.svg?branch=master)](https://coveralls.io/r/micromatch/anymatch?branch=master) +====== +Javascript module to match a string against a regular expression, glob, string, +or function that takes the string as an argument and returns a truthy or falsy +value. The matcher can also be an array of any or all of these. Useful for +allowing a very flexible user-defined config to define things like file paths. + +__Note: This module has Bash-parity, please be aware that Windows-style backslashes are not supported as separators. See https://github.com/micromatch/micromatch#backslashes for more information.__ + +[![NPM](https://nodei.co/npm/anymatch.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/anymatch/) +[![NPM](https://nodei.co/npm-dl/anymatch.png?height=3&months=9)](https://nodei.co/npm-dl/anymatch/) + +Usage +----- +```sh +npm install anymatch --save +``` + +#### anymatch (matchers, testString, [returnIndex], [startIndex], [endIndex]) +* __matchers__: (_Array|String|RegExp|Function_) +String to be directly matched, string with glob patterns, regular expression +test, function that takes the testString as an argument and returns a truthy +value if it should be matched, or an array of any number and mix of these types. +* __testString__: (_String|Array_) The string to test against the matchers. If +passed as an array, the first element of the array will be used as the +`testString` for non-function matchers, while the entire array will be applied +as the arguments for function matchers. +* __returnIndex__: (_Boolean [optional]_) If true, return the array index of +the first matcher that that testString matched, or -1 if no match, instead of a +boolean result. +* __startIndex, endIndex__: (_Integer [optional]_) Can be used to define a +subset out of the array of provided matchers to test against. Can be useful +with bound matcher functions (see below). When used with `returnIndex = true` +preserves original indexing. Behaves the same as `Array.prototype.slice` (i.e. +includes array members up to, but not including endIndex). 
+ +```js +var anymatch = require('anymatch'); + +var matchers = [ + 'path/to/file.js', + 'path/anyjs/**/*.js', + /foo\.js$/, + function (string) { + return string.indexOf('bar') !== -1 && string.length > 10 + } +]; + +anymatch(matchers, 'path/to/file.js'); // true +anymatch(matchers, 'path/anyjs/baz.js'); // true +anymatch(matchers, 'path/to/foo.js'); // true +anymatch(matchers, 'path/to/bar.js'); // true +anymatch(matchers, 'bar.js'); // false + +// returnIndex = true +anymatch(matchers, 'foo.js', true); // 2 +anymatch(matchers, 'path/anyjs/foo.js', true); // 1 + +// skip matchers +anymatch(matchers, 'path/to/file.js', false, 1); // false +anymatch(matchers, 'path/anyjs/foo.js', true, 2, 3); // 2 +anymatch(matchers, 'path/to/bar.js', true, 0, 3); // -1 + +// using globs to match directories and their children +anymatch('node_modules', 'node_modules'); // true +anymatch('node_modules', 'node_modules/somelib/index.js'); // false +anymatch('node_modules/**', 'node_modules/somelib/index.js'); // true +anymatch('node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // false +anymatch('**/node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // true +``` + +#### anymatch (matchers) +You can also pass in only your matcher(s) to get a curried function that has +already been bound to the provided matching criteria. This can be used as an +`Array.prototype.filter` callback. + +```js +var matcher = anymatch(matchers); + +matcher('path/to/file.js'); // true +matcher('path/anyjs/baz.js', true); // 1 +matcher('path/anyjs/baz.js', true, 2); // -1 + +['foo.js', 'bar.js'].filter(matcher); // ['foo.js'] +``` + +Change Log +---------- +[See release notes page on GitHub](https://github.com/micromatch/anymatch/releases) + +NOTE: As of v2.0.0, [micromatch](https://github.com/jonschlinkert/micromatch) moves away from minimatch-parity and inline with Bash. This includes handling backslashes differently (see https://github.com/micromatch/micromatch#backslashes for more information). + +NOTE: As of v1.2.0, anymatch uses [micromatch](https://github.com/jonschlinkert/micromatch) +for glob pattern matching. Issues with glob pattern matching should be +reported directly to the [micromatch issue tracker](https://github.com/jonschlinkert/micromatch/issues). + +License +------- +[ISC](https://raw.github.com/micromatch/anymatch/master/LICENSE) diff --git a/node_modules/anymatch/index.js b/node_modules/anymatch/index.js new file mode 100644 index 00000000..e4116185 --- /dev/null +++ b/node_modules/anymatch/index.js @@ -0,0 +1,67 @@ +'use strict'; + +var micromatch = require('micromatch'); +var normalize = require('normalize-path'); +var path = require('path'); // required for tests. +var arrify = function(a) { return a == null ? [] : (Array.isArray(a) ? a : [a]); }; + +var anymatch = function(criteria, value, returnIndex, startIndex, endIndex) { + criteria = arrify(criteria); + value = arrify(value); + if (arguments.length === 1) { + return anymatch.bind(null, criteria.map(function(criterion) { + return typeof criterion === 'string' && criterion[0] !== '!' ? 
+ micromatch.matcher(criterion) : criterion; + })); + } + startIndex = startIndex || 0; + var string = value[0]; + var altString, altValue; + var matched = false; + var matchIndex = -1; + function testCriteria(criterion, index) { + var result; + switch (Object.prototype.toString.call(criterion)) { + case '[object String]': + result = string === criterion || altString && altString === criterion; + result = result || micromatch.isMatch(string, criterion); + break; + case '[object RegExp]': + result = criterion.test(string) || altString && criterion.test(altString); + break; + case '[object Function]': + result = criterion.apply(null, value); + result = result || altValue && criterion.apply(null, altValue); + break; + default: + result = false; + } + if (result) { + matchIndex = index + startIndex; + } + return result; + } + var crit = criteria; + var negGlobs = crit.reduce(function(arr, criterion, index) { + if (typeof criterion === 'string' && criterion[0] === '!') { + if (crit === criteria) { + // make a copy before modifying + crit = crit.slice(); + } + crit[index] = null; + arr.push(criterion.substr(1)); + } + return arr; + }, []); + if (!negGlobs.length || !micromatch.any(string, negGlobs)) { + if (path.sep === '\\' && typeof string === 'string') { + altString = normalize(string); + altString = altString === string ? null : altString; + if (altString) altValue = [altString].concat(value.slice(1)); + } + matched = crit.slice(startIndex, endIndex).some(testCriteria); + } + return returnIndex === true ? matchIndex : matched; +}; + +module.exports = anymatch; diff --git a/node_modules/anymatch/node_modules/normalize-path/LICENSE b/node_modules/anymatch/node_modules/normalize-path/LICENSE new file mode 100644 index 00000000..d734237b --- /dev/null +++ b/node_modules/anymatch/node_modules/normalize-path/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
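One behaviour of the `anymatch/index.js` shown above that the readme examples do not exercise is negation: matcher strings beginning with `!` are collected into `negGlobs`, and if `micromatch.any` reports a hit against any of them the whole match short-circuits to `false`. From the caller's side, an illustrative sketch based on that implementation:

```js
var anymatch = require('anymatch');

// A leading "!" turns a glob into an exclusion; if any negated glob matches,
// the result is false regardless of the positive matchers.
var matchers = ['**/*.js', '!**/*.spec.js'];

anymatch(matchers, 'src/index.js');      // true  (matches **/*.js)
anymatch(matchers, 'src/index.spec.js'); // false (excluded by !**/*.spec.js)
```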
diff --git a/node_modules/anymatch/node_modules/normalize-path/README.md b/node_modules/anymatch/node_modules/normalize-path/README.md new file mode 100644 index 00000000..daa0edda --- /dev/null +++ b/node_modules/anymatch/node_modules/normalize-path/README.md @@ -0,0 +1,92 @@ +# normalize-path [![NPM version](https://img.shields.io/npm/v/normalize-path.svg?style=flat)](https://www.npmjs.com/package/normalize-path) [![NPM monthly downloads](https://img.shields.io/npm/dm/normalize-path.svg?style=flat)](https://npmjs.org/package/normalize-path) [![NPM total downloads](https://img.shields.io/npm/dt/normalize-path.svg?style=flat)](https://npmjs.org/package/normalize-path) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/normalize-path.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/normalize-path) + +> Normalize file path slashes to be unix-like forward slashes. Also condenses repeat slashes to a single slash and removes and trailing slashes unless disabled. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save normalize-path +``` + +## Usage + +```js +var normalize = require('normalize-path'); + +normalize('\\foo\\bar\\baz\\'); +//=> '/foo/bar/baz' + +normalize('./foo/bar/baz/'); +//=> './foo/bar/baz' +``` + +Pass `false` as the last argument to **keep** trailing slashes: + +```js +normalize('./foo/bar/baz/', false); +//=> './foo/bar/baz/' + +normalize('foo\\bar\\baz\\', false); +//=> 'foo/bar/baz/' +``` + +## About + +### Related projects + +* [contains-path](https://www.npmjs.com/package/contains-path): Return true if a file path contains the given path. | [homepage](https://github.com/jonschlinkert/contains-path "Return true if a file path contains the given path.") +* [ends-with](https://www.npmjs.com/package/ends-with): Returns `true` if the given `string` or `array` ends with `suffix` using strict equality for… [more](https://github.com/jonschlinkert/ends-with) | [homepage](https://github.com/jonschlinkert/ends-with "Returns `true` if the given `string` or `array` ends with `suffix` using strict equality for comparisons.") +* [is-absolute](https://www.npmjs.com/package/is-absolute): Polyfill for node.js `path.isAbolute`. Returns true if a file path is absolute. | [homepage](https://github.com/jonschlinkert/is-absolute "Polyfill for node.js `path.isAbolute`. Returns true if a file path is absolute.") +* [is-relative](https://www.npmjs.com/package/is-relative): Returns `true` if the path appears to be relative. | [homepage](https://github.com/jonschlinkert/is-relative "Returns `true` if the path appears to be relative.") +* [parse-filepath](https://www.npmjs.com/package/parse-filepath): Pollyfill for node.js `path.parse`, parses a filepath into an object. | [homepage](https://github.com/jonschlinkert/parse-filepath "Pollyfill for node.js `path.parse`, parses a filepath into an object.") +* [path-ends-with](https://www.npmjs.com/package/path-ends-with): Return `true` if a file path ends with the given string/suffix. | [homepage](https://github.com/jonschlinkert/path-ends-with "Return `true` if a file path ends with the given string/suffix.") +* [path-segments](https://www.npmjs.com/package/path-segments): Get n specific segments of a file path, e.g. first 2, last 3, etc. | [homepage](https://github.com/jonschlinkert/path-segments "Get n specific segments of a file path, e.g. 
first 2, last 3, etc.") +* [rewrite-ext](https://www.npmjs.com/package/rewrite-ext): Automatically re-write the destination extension of a filepath based on the source extension. e.g… [more](https://github.com/jonschlinkert/rewrite-ext) | [homepage](https://github.com/jonschlinkert/rewrite-ext "Automatically re-write the destination extension of a filepath based on the source extension. e.g `.coffee` => `.js`. This will only rename the ext, no other path parts are modified.") +* [unixify](https://www.npmjs.com/package/unixify): Convert Windows file paths to unix paths. | [homepage](https://github.com/jonschlinkert/unixify "Convert Windows file paths to unix paths.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 31 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [phated](https://github.com/phated) | + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.4.3, on March 29, 2017._ \ No newline at end of file diff --git a/node_modules/anymatch/node_modules/normalize-path/index.js b/node_modules/anymatch/node_modules/normalize-path/index.js new file mode 100644 index 00000000..4a4f8ccd --- /dev/null +++ b/node_modules/anymatch/node_modules/normalize-path/index.js @@ -0,0 +1,19 @@ +/*! + * normalize-path + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +var removeTrailingSeparator = require('remove-trailing-separator'); + +module.exports = function normalizePath(str, stripTrailing) { + if (typeof str !== 'string') { + throw new TypeError('expected a string'); + } + str = str.replace(/[\\\/]+/g, '/'); + if (stripTrailing !== false) { + str = removeTrailingSeparator(str); + } + return str; +}; diff --git a/node_modules/anymatch/node_modules/normalize-path/package.json b/node_modules/anymatch/node_modules/normalize-path/package.json new file mode 100644 index 00000000..c16ef9d2 --- /dev/null +++ b/node_modules/anymatch/node_modules/normalize-path/package.json @@ -0,0 +1,78 @@ +{ + "name": "normalize-path", + "description": "Normalize file path slashes to be unix-like forward slashes. 
Also condenses repeat slashes to a single slash and removes and trailing slashes unless disabled.", + "version": "2.1.1", + "homepage": "https://github.com/jonschlinkert/normalize-path", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Blaine Bublitz (https://twitter.com/BlaineBublitz)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "jonschlinkert/normalize-path", + "bugs": { + "url": "https://github.com/jonschlinkert/normalize-path/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "remove-trailing-separator": "^1.0.1" + }, + "devDependencies": { + "benchmarked": "^0.1.1", + "gulp-format-md": "^0.1.11", + "minimist": "^1.2.0", + "mocha": "*" + }, + "keywords": [ + "backslash", + "file", + "filepath", + "fix", + "forward", + "fp", + "fs", + "normalize", + "path", + "slash", + "slashes", + "trailing", + "unix", + "urix" + ], + "verb": { + "related": { + "list": [ + "contains-path", + "ends-with", + "is-absolute", + "is-relative", + "parse-filepath", + "path-ends-with", + "path-segments", + "rewrite-ext", + "unixify" + ], + "description": "Other useful libraries for working with paths in node.js:" + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/anymatch/package.json b/node_modules/anymatch/package.json new file mode 100644 index 00000000..fdbafd76 --- /dev/null +++ b/node_modules/anymatch/package.json @@ -0,0 +1,47 @@ +{ + "name": "anymatch", + "version": "2.0.0", + "description": "Matches strings against configurable strings, globs, regular expressions, and/or functions", + "files": [ + "index.js" + ], + "author": { + "name": "Elan Shanker", + "url": "http://github.com/es128" + }, + "license": "ISC", + "homepage": "https://github.com/micromatch/anymatch", + "repository": { + "type": "git", + "url": "https://github.com/micromatch/anymatch" + }, + "bugs": { + "url": "https://github.com/micromatch/anymatch/issues" + }, + "keywords": [ + "match", + "any", + "string", + "file", + "fs", + "list", + "glob", + "regex", + "regexp", + "regular", + "expression", + "function" + ], + "scripts": { + "test": "istanbul cover _mocha && cat ./coverage/lcov.info | coveralls" + }, + "dependencies": { + "micromatch": "^3.1.4", + "normalize-path": "^2.1.1" + }, + "devDependencies": { + "coveralls": "^2.7.0", + "istanbul": "^0.4.5", + "mocha": "^3.0.0" + } +} diff --git a/node_modules/aproba/LICENSE b/node_modules/aproba/LICENSE new file mode 100644 index 00000000..f4be44d8 --- /dev/null +++ b/node_modules/aproba/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ diff --git a/node_modules/aproba/README.md b/node_modules/aproba/README.md new file mode 100644 index 00000000..0bfc594c --- /dev/null +++ b/node_modules/aproba/README.md @@ -0,0 +1,94 @@ +aproba +====== + +A ridiculously light-weight function argument validator + +``` +var validate = require("aproba") + +function myfunc(a, b, c) { + // `a` must be a string, `b` a number, `c` a function + validate('SNF', arguments) // [a,b,c] is also valid +} + +myfunc('test', 23, function () {}) // ok +myfunc(123, 23, function () {}) // type error +myfunc('test', 23) // missing arg error +myfunc('test', 23, function () {}, true) // too many args error + +``` + +Valid types are: + +| type | description +| :--: | :---------- +| * | matches any type +| A | `Array.isArray` OR an `arguments` object +| S | typeof == string +| N | typeof == number +| F | typeof == function +| O | typeof == object and not type A and not type E +| B | typeof == boolean +| E | `instanceof Error` OR `null` **(special: see below)** +| Z | == `null` + +Validation failures throw one of three exception types, distinguished by a +`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`. + +If you pass in an invalid type then it will throw with a code of +`EUNKNOWNTYPE`. + +If an **error** argument is found and is not null then the remaining +arguments are optional. That is, if you say `ESO` then that's like using a +non-magical `E` in: `E|ESO|ZSO`. + +### But I have optional arguments?! + +You can provide more than one signature by separating them with pipes `|`. +If any signature matches the arguments then they'll be considered valid. + +So for example, say you wanted to write a signature for +`fs.createWriteStream`. The docs for it describe it thusly: + +``` +fs.createWriteStream(path[, options]) +``` + +This would be a signature of `SO|S`. That is, a string and and object, or +just a string. + +Now, if you read the full `fs` docs, you'll see that actually path can ALSO +be a buffer. And options can be a string, that is: +``` +path | +options | +``` + +To reproduce this you have to fully enumerate all of the possible +combinations and that implies a signature of `SO|SS|OO|OS|S|O`. The +awkwardness is a feature: It reminds you of the complexity you're adding to +your API when you do this sort of thing. + + +### Browser support + +This has no dependencies and should work in browsers, though you'll have +noisier stack traces. + +### Why this exists + +I wanted a very simple argument validator. It needed to do two things: + +1. Be more concise and easier to use than assertions + +2. Not encourage an infinite bikeshed of DSLs + +This is why types are specified by a single character and there's no such +thing as an optional argument. + +This is not intended to validate user data. This is specifically about +asserting the interface of your functions. + +If you need greater validation, I encourage you to write them by hand or +look elsewhere. 
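To make the multi-signature syntax concrete, here is a small sketch (function and argument names are hypothetical) of validating a `createWriteStream`-style API whose second argument may be an options object, an encoding string, or absent — the `SO|SS|S` case discussed above:

```js
var validate = require('aproba')

function openLog (path, options) {
  // path must be a string; options may be an object, a string, or omitted
  validate('SO|SS|S', arguments)
  // ... do the real work here
}

openLog('app.log')                 // ok – matches 'S'
openLog('app.log', { flags: 'a' }) // ok – matches 'SO'
openLog('app.log', 'utf8')         // ok – matches 'SS'
openLog(42)                        // throws, code EINVALIDTYPE
```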
+ diff --git a/node_modules/aproba/index.js b/node_modules/aproba/index.js new file mode 100644 index 00000000..6f3f797c --- /dev/null +++ b/node_modules/aproba/index.js @@ -0,0 +1,105 @@ +'use strict' + +function isArguments (thingy) { + return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee') +} + +var types = { + '*': {label: 'any', check: function () { return true }}, + A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }}, + S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }}, + N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }}, + F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }}, + O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }}, + B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }}, + E: {label: 'error', check: function (thingy) { return thingy instanceof Error }}, + Z: {label: 'null', check: function (thingy) { return thingy == null }} +} + +function addSchema (schema, arity) { + var group = arity[schema.length] = arity[schema.length] || [] + if (group.indexOf(schema) === -1) group.push(schema) +} + +var validate = module.exports = function (rawSchemas, args) { + if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length) + if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas') + if (!args) throw missingRequiredArg(1, 'args') + if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas) + if (!types.A.check(args)) throw invalidType(1, ['array'], args) + var schemas = rawSchemas.split('|') + var arity = {} + + schemas.forEach(function (schema) { + for (var ii = 0; ii < schema.length; ++ii) { + var type = schema[ii] + if (!types[type]) throw unknownType(ii, type) + } + if (/E.*E/.test(schema)) throw moreThanOneError(schema) + addSchema(schema, arity) + if (/E/.test(schema)) { + addSchema(schema.replace(/E.*$/, 'E'), arity) + addSchema(schema.replace(/E/, 'Z'), arity) + if (schema.length === 1) addSchema('', arity) + } + }) + var matching = arity[args.length] + if (!matching) { + throw wrongNumberOfArgs(Object.keys(arity), args.length) + } + for (var ii = 0; ii < args.length; ++ii) { + var newMatching = matching.filter(function (schema) { + var type = schema[ii] + var typeCheck = types[type].check + return typeCheck(args[ii]) + }) + if (!newMatching.length) { + var labels = matching.map(function (schema) { + return types[schema[ii]].label + }).filter(function (schema) { return schema != null }) + throw invalidType(ii, labels, args[ii]) + } + matching = newMatching + } +} + +function missingRequiredArg (num) { + return newException('EMISSINGARG', 'Missing required argument #' + (num + 1)) +} + +function unknownType (num, type) { + return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1)) +} + +function invalidType (num, expectedTypes, value) { + var valueType + Object.keys(types).forEach(function (typeCode) { + if (types[typeCode].check(value)) valueType = types[typeCode].label + }) + return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' + + englishList(expectedTypes) + ' but got ' + valueType) +} + +function englishList (list) { + return list.join(', ').replace(/, ([^,]+)$/, ' or $1') +} + +function wrongNumberOfArgs (expected, got) { + var english = 
englishList(expected) + var args = expected.every(function (ex) { return ex.length === 1 }) + ? 'argument' + : 'arguments' + return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got) +} + +function moreThanOneError (schema) { + return newException('ETOOMANYERRORTYPES', + 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"') +} + +function newException (code, msg) { + var e = new Error(msg) + e.code = code + if (Error.captureStackTrace) Error.captureStackTrace(e, validate) + return e +} diff --git a/node_modules/aproba/package.json b/node_modules/aproba/package.json new file mode 100644 index 00000000..f008787b --- /dev/null +++ b/node_modules/aproba/package.json @@ -0,0 +1,34 @@ +{ + "name": "aproba", + "version": "1.2.0", + "description": "A ridiculously light-weight argument validator (now browser friendly)", + "main": "index.js", + "directories": { + "test": "test" + }, + "dependencies": {}, + "devDependencies": { + "standard": "^10.0.3", + "tap": "^10.0.2" + }, + "files": [ + "index.js" + ], + "scripts": { + "test": "standard && tap -j3 test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/aproba" + }, + "keywords": [ + "argument", + "validate" + ], + "author": "Rebecca Turner ", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/aproba/issues" + }, + "homepage": "https://github.com/iarna/aproba" +} diff --git a/node_modules/are-we-there-yet/CHANGES.md b/node_modules/are-we-there-yet/CHANGES.md new file mode 100644 index 00000000..21f3b1c1 --- /dev/null +++ b/node_modules/are-we-there-yet/CHANGES.md @@ -0,0 +1,37 @@ +Hi, figured we could actually use a changelog now: + +## 1.1.5 2018-05-24 + +* [#92](https://github.com/iarna/are-we-there-yet/pull/92) Fix bug where + `finish` would throw errors when including `TrackerStream` objects in + `TrackerGroup` collections. (@brianloveswords) + +## 1.1.4 2017-04-21 + +* Fix typo in package.json + +## 1.1.3 2017-04-21 + +* Improve documentation and limit files included in the distribution. + +## 1.1.2 2016-03-15 + +* Add tracker group cycle detection and tests for it + +## 1.1.1 2016-01-29 + +* Fix a typo in stream completion tracker + +## 1.1.0 2016-01-29 + +* Rewrote completion percent computation to be low impact– no more walking a + tree of completion groups every time we need this info. Previously, with + medium sized tree of completion groups, even a relatively modest number of + calls to the top level `completed()` method would result in absurd numbers + of calls overall as it walked down the tree. We now, instead, keep track as + we bubble up changes, so the computation is limited to when data changes and + to the depth of that one branch, instead of _every_ node. (Plus, we were already + incurring _this_ cost, since we already bubbled out changes.) +* Moved different tracker types out to their own files. +* Made tests test for TOO MANY events too. +* Standarized the source code formatting diff --git a/node_modules/are-we-there-yet/LICENSE b/node_modules/are-we-there-yet/LICENSE new file mode 100644 index 00000000..af458806 --- /dev/null +++ b/node_modules/are-we-there-yet/LICENSE @@ -0,0 +1,5 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/are-we-there-yet/README.md b/node_modules/are-we-there-yet/README.md new file mode 100644 index 00000000..7e2b42d8 --- /dev/null +++ b/node_modules/are-we-there-yet/README.md @@ -0,0 +1,195 @@ +are-we-there-yet +---------------- + +Track complex hiearchies of asynchronous task completion statuses. This is +intended to give you a way of recording and reporting the progress of the big +recursive fan-out and gather type workflows that are so common in async. + +What you do with this completion data is up to you, but the most common use case is to +feed it to one of the many progress bar modules. + +Most progress bar modules include a rudamentary version of this, but my +needs were more complex. + +Usage +===== + +```javascript +var TrackerGroup = require("are-we-there-yet").TrackerGroup + +var top = new TrackerGroup("program") + +var single = top.newItem("one thing", 100) +single.completeWork(20) + +console.log(top.completed()) // 0.2 + +fs.stat("file", function(er, stat) { + if (er) throw er + var stream = top.newStream("file", stat.size) + console.log(top.completed()) // now 0.1 as single is 50% of the job and is 20% complete + // and 50% * 20% == 10% + fs.createReadStream("file").pipe(stream).on("data", function (chunk) { + // do stuff with chunk + }) + top.on("change", function (name) { + // called each time a chunk is read from "file" + // top.completed() will start at 0.1 and fill up to 0.6 as the file is read + }) +}) +``` + +Shared Methods +============== + +* var completed = tracker.completed() + +Implemented in: `Tracker`, `TrackerGroup`, `TrackerStream` + +Returns the ratio of completed work to work to be done. Range of 0 to 1. + +* tracker.finish() + +Implemented in: `Tracker`, `TrackerGroup` + +Marks the tracker as completed. With a TrackerGroup this marks all of its +components as completed. + +Marks all of the components of this tracker as finished, which in turn means +that `tracker.completed()` for this will now be 1. + +This will result in one or more `change` events being emitted. + +Events +====== + +All tracker objects emit `change` events with the following arguments: + +``` +function (name, completed, tracker) +``` + +`name` is the name of the tracker that originally emitted the event, +or if it didn't have one, the first containing tracker group that had one. + +`completed` is the percent complete (as returned by `tracker.completed()` method). + +`tracker` is the tracker object that you are listening for events on. + +TrackerGroup +============ + +* var tracker = new TrackerGroup(**name**) + + * **name** *(optional)* - The name of this tracker group, used in change + notifications if the component updating didn't have a name. Defaults to undefined. + +Creates a new empty tracker aggregation group. These are trackers whose +completion status is determined by the completion status of other trackers. 
+ +* tracker.addUnit(**otherTracker**, **weight**) + + * **otherTracker** - Any of the other are-we-there-yet tracker objects + * **weight** *(optional)* - The weight to give the tracker, defaults to 1. + +Adds the **otherTracker** to this aggregation group. The weight determines +how long you expect this tracker to take to complete in proportion to other +units. So for instance, if you add one tracker with a weight of 1 and +another with a weight of 2, you're saying the second will take twice as long +to complete as the first. As such, the first will account for 33% of the +completion of this tracker and the second will account for the other 67%. + +Returns **otherTracker**. + +* var subGroup = tracker.newGroup(**name**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subGroup = tracker.addUnit(new TrackerGroup(name), weight) +``` + +* var subItem = tracker.newItem(**name**, **todo**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subItem = tracker.addUnit(new Tracker(name, todo), weight) +``` + +* var subStream = tracker.newStream(**name**, **todo**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subStream = tracker.addUnit(new TrackerStream(name, todo), weight) +``` + +* console.log( tracker.debug() ) + +Returns a tree showing the completion of this tracker group and all of its +children, including recursively entering all of the children. + +Tracker +======= + +* var tracker = new Tracker(**name**, **todo**) + + * **name** *(optional)* The name of this counter to report in change + events. Defaults to undefined. + * **todo** *(optional)* The amount of work todo (a number). Defaults to 0. + +Ordinarily these are constructed as a part of a tracker group (via +`newItem`). + +* var completed = tracker.completed() + +Returns the ratio of completed work to work to be done. Range of 0 to 1. If +total work to be done is 0 then it will return 0. + +* tracker.addWork(**todo**) + + * **todo** A number to add to the amount of work to be done. + +Increases the amount of work to be done, thus decreasing the completion +percentage. Triggers a `change` event. + +* tracker.completeWork(**completed**) + + * **completed** A number to add to the work complete + +Increase the amount of work complete, thus increasing the completion percentage. +Will never increase the work completed past the amount of work todo. That is, +percentages > 100% are not allowed. Triggers a `change` event. + +* tracker.finish() + +Marks this tracker as finished, tracker.completed() will now be 1. Triggers +a `change` event. + +TrackerStream +============= + +* var tracker = new TrackerStream(**name**, **size**, **options**) + + * **name** *(optional)* The name of this counter to report in change + events. Defaults to undefined. + * **size** *(optional)* The number of bytes being sent through this stream. + * **options** *(optional)* A hash of stream options + +The tracker stream object is a pass through stream that updates an internal +tracker object each time a block passes through. It's intended to track +downloads, file extraction and other related activities. You use it by piping +your data source into it and then using it as your data source. + +If your data has a length attribute then that's used as the amount of work +completed when the chunk is passed through. If it does not (eg, object +streams) then each chunk counts as completing 1 unit of work, so your size +should be the total number of objects being streamed. 
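For example, a minimal sketch of an object-mode `TrackerStream` (names are illustrative): because the chunks below have no `length` property, each one counts as a single unit of work against the declared total of 3.

```js
var TrackerStream = require('are-we-there-yet').TrackerStream

var tracked = new TrackerStream('records', 3, { objectMode: true })
tracked.on('change', function (name, completed) {
  // logs the growing completion: 1/3, 2/3, 1 (finish fires a final change as well)
  console.log(name, completed)
})
tracked.resume() // keep the pass-through side flowing

tracked.write({ id: 1 })
tracked.write({ id: 2 })
tracked.write({ id: 3 })
tracked.end()
```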
+ +* tracker.addWork(**todo**) + + * **todo** Increase the expected overall size by **todo** bytes. + +Increases the amount of work to be done, thus decreasing the completion +percentage. Triggers a `change` event. diff --git a/node_modules/are-we-there-yet/index.js b/node_modules/are-we-there-yet/index.js new file mode 100644 index 00000000..57d8743f --- /dev/null +++ b/node_modules/are-we-there-yet/index.js @@ -0,0 +1,4 @@ +'use strict' +exports.TrackerGroup = require('./tracker-group.js') +exports.Tracker = require('./tracker.js') +exports.TrackerStream = require('./tracker-stream.js') diff --git a/node_modules/are-we-there-yet/package.json b/node_modules/are-we-there-yet/package.json new file mode 100644 index 00000000..c5990c9b --- /dev/null +++ b/node_modules/are-we-there-yet/package.json @@ -0,0 +1,35 @@ +{ + "name": "are-we-there-yet", + "version": "1.1.5", + "description": "Keep track of the overall completion of many disparate processes", + "main": "index.js", + "scripts": { + "test": "standard && tap test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/are-we-there-yet.git" + }, + "author": "Rebecca Turner (http://re-becca.org)", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/are-we-there-yet/issues" + }, + "homepage": "https://github.com/iarna/are-we-there-yet", + "devDependencies": { + "standard": "^11.0.1", + "tap": "^12.0.1" + }, + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^2.0.6" + }, + "files": [ + "index.js", + "tracker-base.js", + "tracker-group.js", + "tracker-stream.js", + "tracker.js", + "CHANGES.md" + ] +} diff --git a/node_modules/are-we-there-yet/tracker-base.js b/node_modules/are-we-there-yet/tracker-base.js new file mode 100644 index 00000000..6f436875 --- /dev/null +++ b/node_modules/are-we-there-yet/tracker-base.js @@ -0,0 +1,11 @@ +'use strict' +var EventEmitter = require('events').EventEmitter +var util = require('util') + +var trackerId = 0 +var TrackerBase = module.exports = function (name) { + EventEmitter.call(this) + this.id = ++trackerId + this.name = name +} +util.inherits(TrackerBase, EventEmitter) diff --git a/node_modules/are-we-there-yet/tracker-group.js b/node_modules/are-we-there-yet/tracker-group.js new file mode 100644 index 00000000..9759e122 --- /dev/null +++ b/node_modules/are-we-there-yet/tracker-group.js @@ -0,0 +1,107 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') +var Tracker = require('./tracker.js') +var TrackerStream = require('./tracker-stream.js') + +var TrackerGroup = module.exports = function (name) { + TrackerBase.call(this, name) + this.parentGroup = null + this.trackers = [] + this.completion = {} + this.weight = {} + this.totalWeight = 0 + this.finished = false + this.bubbleChange = bubbleChange(this) +} +util.inherits(TrackerGroup, TrackerBase) + +function bubbleChange (trackerGroup) { + return function (name, completed, tracker) { + trackerGroup.completion[tracker.id] = completed + if (trackerGroup.finished) return + trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup) + } +} + +TrackerGroup.prototype.nameInTree = function () { + var names = [] + var from = this + while (from) { + names.unshift(from.name) + from = from.parentGroup + } + return names.join('/') +} + +TrackerGroup.prototype.addUnit = function (unit, weight) { + if (unit.addUnit) { + var toTest = this + while (toTest) { + if (unit === toTest) { + throw new Error( + 'Attempted to add tracker group ' + 
+ unit.name + ' to tree that already includes it ' + + this.nameInTree(this)) + } + toTest = toTest.parentGroup + } + unit.parentGroup = this + } + this.weight[unit.id] = weight || 1 + this.totalWeight += this.weight[unit.id] + this.trackers.push(unit) + this.completion[unit.id] = unit.completed() + unit.on('change', this.bubbleChange) + if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit) + return unit +} + +TrackerGroup.prototype.completed = function () { + if (this.trackers.length === 0) return 0 + var valPerWeight = 1 / this.totalWeight + var completed = 0 + for (var ii = 0; ii < this.trackers.length; ii++) { + var trackerId = this.trackers[ii].id + completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId] + } + return completed +} + +TrackerGroup.prototype.newGroup = function (name, weight) { + return this.addUnit(new TrackerGroup(name), weight) +} + +TrackerGroup.prototype.newItem = function (name, todo, weight) { + return this.addUnit(new Tracker(name, todo), weight) +} + +TrackerGroup.prototype.newStream = function (name, todo, weight) { + return this.addUnit(new TrackerStream(name, todo), weight) +} + +TrackerGroup.prototype.finish = function () { + this.finished = true + if (!this.trackers.length) this.addUnit(new Tracker(), 1, true) + for (var ii = 0; ii < this.trackers.length; ii++) { + var tracker = this.trackers[ii] + tracker.finish() + tracker.removeListener('change', this.bubbleChange) + } + this.emit('change', this.name, 1, this) +} + +var buffer = ' ' +TrackerGroup.prototype.debug = function (depth) { + depth = depth || 0 + var indent = depth ? buffer.substr(0, depth) : '' + var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n' + this.trackers.forEach(function (tracker) { + if (tracker instanceof TrackerGroup) { + output += tracker.debug(depth + 1) + } else { + output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n' + } + }) + return output +} diff --git a/node_modules/are-we-there-yet/tracker-stream.js b/node_modules/are-we-there-yet/tracker-stream.js new file mode 100644 index 00000000..e1cf8505 --- /dev/null +++ b/node_modules/are-we-there-yet/tracker-stream.js @@ -0,0 +1,36 @@ +'use strict' +var util = require('util') +var stream = require('readable-stream') +var delegate = require('delegates') +var Tracker = require('./tracker.js') + +var TrackerStream = module.exports = function (name, size, options) { + stream.Transform.call(this, options) + this.tracker = new Tracker(name, size) + this.name = name + this.id = this.tracker.id + this.tracker.on('change', delegateChange(this)) +} +util.inherits(TrackerStream, stream.Transform) + +function delegateChange (trackerStream) { + return function (name, completion, tracker) { + trackerStream.emit('change', name, completion, trackerStream) + } +} + +TrackerStream.prototype._transform = function (data, encoding, cb) { + this.tracker.completeWork(data.length ? 
data.length : 1) + this.push(data) + cb() +} + +TrackerStream.prototype._flush = function (cb) { + this.tracker.finish() + cb() +} + +delegate(TrackerStream.prototype, 'tracker') + .method('completed') + .method('addWork') + .method('finish') diff --git a/node_modules/are-we-there-yet/tracker.js b/node_modules/are-we-there-yet/tracker.js new file mode 100644 index 00000000..68c2339b --- /dev/null +++ b/node_modules/are-we-there-yet/tracker.js @@ -0,0 +1,30 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') + +var Tracker = module.exports = function (name, todo) { + TrackerBase.call(this, name) + this.workDone = 0 + this.workTodo = todo || 0 +} +util.inherits(Tracker, TrackerBase) + +Tracker.prototype.completed = function () { + return this.workTodo === 0 ? 0 : this.workDone / this.workTodo +} + +Tracker.prototype.addWork = function (work) { + this.workTodo += work + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.completeWork = function (work) { + this.workDone += work + if (this.workDone > this.workTodo) this.workDone = this.workTodo + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.finish = function () { + this.workTodo = this.workDone = 1 + this.emit('change', this.name, 1, this) +} diff --git a/node_modules/arr-diff/LICENSE b/node_modules/arr-diff/LICENSE new file mode 100755 index 00000000..d734237b --- /dev/null +++ b/node_modules/arr-diff/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/arr-diff/README.md b/node_modules/arr-diff/README.md new file mode 100644 index 00000000..961f5c3f --- /dev/null +++ b/node_modules/arr-diff/README.md @@ -0,0 +1,130 @@ +# arr-diff [![NPM version](https://img.shields.io/npm/v/arr-diff.svg?style=flat)](https://www.npmjs.com/package/arr-diff) [![NPM monthly downloads](https://img.shields.io/npm/dm/arr-diff.svg?style=flat)](https://npmjs.org/package/arr-diff) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/arr-diff.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/arr-diff) + +> Returns an array with only the unique values from the first array, by excluding all values from additional arrays using strict equality for comparisons. 
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save arr-diff +``` + +Install with [yarn](https://yarnpkg.com): + +```sh +$ yarn add arr-diff +``` + +Install with [bower](https://bower.io/) + +```sh +$ bower install arr-diff --save +``` + +## Usage + +Returns the difference between the first array and additional arrays. + +```js +var diff = require('arr-diff'); + +var a = ['a', 'b', 'c', 'd']; +var b = ['b', 'c']; + +console.log(diff(a, b)) +//=> ['a', 'd'] +``` + +## Benchmarks + +This library versus [array-differ](https://github.com/sindresorhus/array-differ), on April 14, 2017: + +``` +Benchmarking: (4 of 4) + · long-dupes + · long + · med + · short + +# benchmark/fixtures/long-dupes.js (100804 bytes) + arr-diff-3.0.0 x 822 ops/sec ±0.67% (86 runs sampled) + arr-diff-4.0.0 x 2,141 ops/sec ±0.42% (89 runs sampled) + array-differ x 708 ops/sec ±0.70% (89 runs sampled) + + fastest is arr-diff-4.0.0 + +# benchmark/fixtures/long.js (94529 bytes) + arr-diff-3.0.0 x 882 ops/sec ±0.60% (87 runs sampled) + arr-diff-4.0.0 x 2,329 ops/sec ±0.97% (83 runs sampled) + array-differ x 769 ops/sec ±0.61% (90 runs sampled) + + fastest is arr-diff-4.0.0 + +# benchmark/fixtures/med.js (708 bytes) + arr-diff-3.0.0 x 856,150 ops/sec ±0.42% (89 runs sampled) + arr-diff-4.0.0 x 4,665,249 ops/sec ±1.06% (89 runs sampled) + array-differ x 653,888 ops/sec ±1.02% (86 runs sampled) + + fastest is arr-diff-4.0.0 + +# benchmark/fixtures/short.js (60 bytes) + arr-diff-3.0.0 x 3,078,467 ops/sec ±0.77% (93 runs sampled) + arr-diff-4.0.0 x 9,213,296 ops/sec ±0.65% (89 runs sampled) + array-differ x 1,337,051 ops/sec ±0.91% (92 runs sampled) + + fastest is arr-diff-4.0.0 +``` + +## About + +### Related projects + +* [arr-flatten](https://www.npmjs.com/package/arr-flatten): Recursively flatten an array or arrays. This is the fastest implementation of array flatten. | [homepage](https://github.com/jonschlinkert/arr-flatten "Recursively flatten an array or arrays. This is the fastest implementation of array flatten.") +* [array-filter](https://www.npmjs.com/package/array-filter): Array#filter for older browsers. | [homepage](https://github.com/juliangruber/array-filter "Array#filter for older browsers.") +* [array-intersection](https://www.npmjs.com/package/array-intersection): Return an array with the unique values present in _all_ given arrays using strict equality… [more](https://github.com/jonschlinkert/array-intersection) | [homepage](https://github.com/jonschlinkert/array-intersection "Return an array with the unique values present in _all_ given arrays using strict equality for comparisons.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 33 | [jonschlinkert](https://github.com/jonschlinkert) | +| 2 | [paulmillr](https://github.com/paulmillr) | + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. 
You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.5.0, on April 14, 2017._ \ No newline at end of file diff --git a/node_modules/arr-diff/index.js b/node_modules/arr-diff/index.js new file mode 100644 index 00000000..90f28077 --- /dev/null +++ b/node_modules/arr-diff/index.js @@ -0,0 +1,47 @@ +/*! + * arr-diff + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +module.exports = function diff(arr/*, arrays*/) { + var len = arguments.length; + var idx = 0; + while (++idx < len) { + arr = diffArray(arr, arguments[idx]); + } + return arr; +}; + +function diffArray(one, two) { + if (!Array.isArray(two)) { + return one.slice(); + } + + var tlen = two.length + var olen = one.length; + var idx = -1; + var arr = []; + + while (++idx < olen) { + var ele = one[idx]; + + var hasEle = false; + for (var i = 0; i < tlen; i++) { + var val = two[i]; + + if (ele === val) { + hasEle = true; + break; + } + } + + if (hasEle === false) { + arr.push(ele); + } + } + return arr; +} diff --git a/node_modules/arr-diff/package.json b/node_modules/arr-diff/package.json new file mode 100644 index 00000000..c106813b --- /dev/null +++ b/node_modules/arr-diff/package.json @@ -0,0 +1,69 @@ +{ + "name": "arr-diff", + "description": "Returns an array with only the unique values from the first array, by excluding all values from additional arrays using strict equality for comparisons.", + "version": "4.0.0", + "homepage": "https://github.com/jonschlinkert/arr-diff", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Paul Miller (paulmillr.com)" + ], + "repository": "jonschlinkert/arr-diff", + "bugs": { + "url": "https://github.com/jonschlinkert/arr-diff/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": {}, + "devDependencies": { + "ansi-bold": "^0.1.1", + "arr-flatten": "^1.0.1", + "array-differ": "^1.0.0", + "benchmarked": "^0.2.4", + "gulp-format-md": "^0.1.9", + "minimist": "^1.2.0", + "mocha": "^2.4.5" + }, + "keywords": [ + "arr", + "array", + "array differ", + "array-differ", + "diff", + "differ", + "difference" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "arr-flatten", + "array-filter", + "array-intersection" + ] + }, + "reflinks": [ + "array-differ", + "verb" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/arr-flatten/LICENSE b/node_modules/arr-flatten/LICENSE new file mode 100755 index 00000000..3f2eca18 --- /dev/null +++ b/node_modules/arr-flatten/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/arr-flatten/README.md b/node_modules/arr-flatten/README.md new file mode 100755 index 00000000..7dc7a974 --- /dev/null +++ b/node_modules/arr-flatten/README.md @@ -0,0 +1,86 @@ +# arr-flatten [![NPM version](https://img.shields.io/npm/v/arr-flatten.svg?style=flat)](https://www.npmjs.com/package/arr-flatten) [![NPM monthly downloads](https://img.shields.io/npm/dm/arr-flatten.svg?style=flat)](https://npmjs.org/package/arr-flatten) [![NPM total downloads](https://img.shields.io/npm/dt/arr-flatten.svg?style=flat)](https://npmjs.org/package/arr-flatten) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/arr-flatten.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/arr-flatten) [![Windows Build Status](https://img.shields.io/appveyor/ci/jonschlinkert/arr-flatten.svg?style=flat&label=AppVeyor)](https://ci.appveyor.com/project/jonschlinkert/arr-flatten) + +> Recursively flatten an array or arrays. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save arr-flatten +``` + +## Install + +Install with [bower](https://bower.io/) + +```sh +$ bower install arr-flatten --save +``` + +## Usage + +```js +var flatten = require('arr-flatten'); + +flatten(['a', ['b', ['c']], 'd', ['e']]); +//=> ['a', 'b', 'c', 'd', 'e'] +``` + +## Why another flatten utility? + +I wanted the fastest implementation I could find, with implementation choices that should work for 95% of use cases, but no cruft to cover the other 5%. + +## About + +### Related projects + +* [arr-filter](https://www.npmjs.com/package/arr-filter): Faster alternative to javascript's native filter method. | [homepage](https://github.com/jonschlinkert/arr-filter "Faster alternative to javascript's native filter method.") +* [arr-union](https://www.npmjs.com/package/arr-union): Combines a list of arrays, returning a single array with unique values, using strict equality… [more](https://github.com/jonschlinkert/arr-union) | [homepage](https://github.com/jonschlinkert/arr-union "Combines a list of arrays, returning a single array with unique values, using strict equality for comparisons.") +* [array-each](https://www.npmjs.com/package/array-each): Loop over each item in an array and call the given function on every element. 
| [homepage](https://github.com/jonschlinkert/array-each "Loop over each item in an array and call the given function on every element.") +* [array-unique](https://www.npmjs.com/package/array-unique): Remove duplicate values from an array. Fastest ES5 implementation. | [homepage](https://github.com/jonschlinkert/array-unique "Remove duplicate values from an array. Fastest ES5 implementation.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 20 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [lukeed](https://github.com/lukeed) | + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on July 05, 2017._ \ No newline at end of file diff --git a/node_modules/arr-flatten/index.js b/node_modules/arr-flatten/index.js new file mode 100644 index 00000000..0cb4ea4e --- /dev/null +++ b/node_modules/arr-flatten/index.js @@ -0,0 +1,22 @@ +/*! + * arr-flatten + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +module.exports = function (arr) { + return flat(arr, []); +}; + +function flat(arr, res) { + var i = 0, cur; + var len = arr.length; + for (; i < len; i++) { + cur = arr[i]; + Array.isArray(cur) ? 
flat(cur, res) : res.push(cur); + } + return res; +} diff --git a/node_modules/arr-flatten/package.json b/node_modules/arr-flatten/package.json new file mode 100644 index 00000000..d2d33e9b --- /dev/null +++ b/node_modules/arr-flatten/package.json @@ -0,0 +1,76 @@ +{ + "name": "arr-flatten", + "description": "Recursively flatten an array or arrays.", + "version": "1.1.0", + "homepage": "https://github.com/jonschlinkert/arr-flatten", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Luke Edwards (https://lukeed.com)" + ], + "repository": "jonschlinkert/arr-flatten", + "bugs": { + "url": "https://github.com/jonschlinkert/arr-flatten/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "ansi-bold": "^0.1.1", + "array-flatten": "^2.1.1", + "array-slice": "^1.0.0", + "benchmarked": "^1.0.0", + "compute-flatten": "^1.0.0", + "flatit": "^1.1.1", + "flatten": "^1.0.2", + "flatten-array": "^1.0.0", + "glob": "^7.1.1", + "gulp-format-md": "^0.1.12", + "just-flatten-it": "^1.1.23", + "lodash.flattendeep": "^4.4.0", + "m_flattened": "^1.0.1", + "mocha": "^3.2.0", + "utils-flatten": "^1.0.0", + "write": "^0.3.3" + }, + "keywords": [ + "arr", + "array", + "elements", + "flat", + "flatten", + "nested", + "recurse", + "recursive", + "recursively" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "arr-filter", + "arr-union", + "array-each", + "array-unique" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/arr-union/LICENSE b/node_modules/arr-union/LICENSE new file mode 100644 index 00000000..39245ac1 --- /dev/null +++ b/node_modules/arr-union/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
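Since the `arr-flatten` implementation above relies only on `.length`, index access and `Array.isArray`, it flattens array-like inputs the same way it flattens a plain array. A small illustrative sketch (the helper below is hypothetical):

```js
var flatten = require('arr-flatten');

function collect() {
  // `arguments` is array-like, so it can be flattened directly
  return flatten(arguments);
}

collect(1, [2, [3, 4]], 5);
//=> [1, 2, 3, 4, 5]
```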
diff --git a/node_modules/arr-union/README.md b/node_modules/arr-union/README.md new file mode 100644 index 00000000..b3cd4f48 --- /dev/null +++ b/node_modules/arr-union/README.md @@ -0,0 +1,99 @@ +# arr-union [![NPM version](https://img.shields.io/npm/v/arr-union.svg)](https://www.npmjs.com/package/arr-union) [![Build Status](https://img.shields.io/travis/jonschlinkert/arr-union.svg)](https://travis-ci.org/jonschlinkert/arr-union) + +> Combines a list of arrays, returning a single array with unique values, using strict equality for comparisons. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm i arr-union --save +``` + +## Benchmarks + +This library is **10-20 times faster** and more performant than [array-union](https://github.com/sindresorhus/array-union). + +See the [benchmarks](./benchmark). + +```sh +#1: five-arrays + array-union x 511,121 ops/sec ±0.80% (96 runs sampled) + arr-union x 5,716,039 ops/sec ±0.86% (93 runs sampled) + +#2: ten-arrays + array-union x 245,196 ops/sec ±0.69% (94 runs sampled) + arr-union x 1,850,786 ops/sec ±0.84% (97 runs sampled) + +#3: two-arrays + array-union x 563,869 ops/sec ±0.97% (94 runs sampled) + arr-union x 9,602,852 ops/sec ±0.87% (92 runs sampled) +``` + +## Usage + +```js +var union = require('arr-union'); + +union(['a'], ['b', 'c'], ['d', 'e', 'f']); +//=> ['a', 'b', 'c', 'd', 'e', 'f'] +``` + +Returns only unique elements: + +```js +union(['a', 'a'], ['b', 'c']); +//=> ['a', 'b', 'c'] +``` + +## Related projects + +* [arr-diff](https://www.npmjs.com/package/arr-diff): Returns an array with only the unique values from the first array, by excluding all… [more](https://www.npmjs.com/package/arr-diff) | [homepage](https://github.com/jonschlinkert/arr-diff) +* [arr-filter](https://www.npmjs.com/package/arr-filter): Faster alternative to javascript's native filter method. | [homepage](https://github.com/jonschlinkert/arr-filter) +* [arr-flatten](https://www.npmjs.com/package/arr-flatten): Recursively flatten an array or arrays. This is the fastest implementation of array flatten. | [homepage](https://github.com/jonschlinkert/arr-flatten) +* [arr-map](https://www.npmjs.com/package/arr-map): Faster, node.js focused alternative to JavaScript's native array map. | [homepage](https://github.com/jonschlinkert/arr-map) +* [arr-pluck](https://www.npmjs.com/package/arr-pluck): Retrieves the value of a specified property from all elements in the collection. | [homepage](https://github.com/jonschlinkert/arr-pluck) +* [arr-reduce](https://www.npmjs.com/package/arr-reduce): Fast array reduce that also loops over sparse elements. | [homepage](https://github.com/jonschlinkert/arr-reduce) +* [array-unique](https://www.npmjs.com/package/array-unique): Return an array free of duplicate values. Fastest ES5 implementation. | [homepage](https://github.com/jonschlinkert/array-unique) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/arr-union/issues/new). 
+ +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm i verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2016 [Jon Schlinkert](https://github.com/jonschlinkert) +Released under the [MIT license](https://github.com/jonschlinkert/arr-union/blob/master/LICENSE). + +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v0.9.0, on February 23, 2016._ \ No newline at end of file diff --git a/node_modules/arr-union/index.js b/node_modules/arr-union/index.js new file mode 100644 index 00000000..5ae6c4a0 --- /dev/null +++ b/node_modules/arr-union/index.js @@ -0,0 +1,29 @@ +'use strict'; + +module.exports = function union(init) { + if (!Array.isArray(init)) { + throw new TypeError('arr-union expects the first argument to be an array.'); + } + + var len = arguments.length; + var i = 0; + + while (++i < len) { + var arg = arguments[i]; + if (!arg) continue; + + if (!Array.isArray(arg)) { + arg = [arg]; + } + + for (var j = 0; j < arg.length; j++) { + var ele = arg[j]; + + if (init.indexOf(ele) >= 0) { + continue; + } + init.push(ele); + } + } + return init; +}; diff --git a/node_modules/arr-union/package.json b/node_modules/arr-union/package.json new file mode 100644 index 00000000..5ee87fd7 --- /dev/null +++ b/node_modules/arr-union/package.json @@ -0,0 +1,76 @@ +{ + "name": "arr-union", + "description": "Combines a list of arrays, returning a single array with unique values, using strict equality for comparisons.", + "version": "3.1.0", + "homepage": "https://github.com/jonschlinkert/arr-union", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/arr-union", + "bugs": { + "url": "https://github.com/jonschlinkert/arr-union/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "ansi-bold": "^0.1.1", + "array-union": "^1.0.1", + "array-unique": "^0.2.1", + "benchmarked": "^0.1.4", + "gulp-format-md": "^0.1.7", + "minimist": "^1.1.1", + "mocha": "*", + "should": "*" + }, + "keywords": [ + "add", + "append", + "array", + "arrays", + "combine", + "concat", + "extend", + "union", + "uniq", + "unique", + "util", + "utility", + "utils" + ], + "verb": { + "run": true, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "arr-diff", + "arr-flatten", + "arr-filter", + "arr-map", + "arr-pluck", + "arr-reduce", + "array-unique" + ] + }, + "reflinks": [ + "verb", + "array-union" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/array-flatten/LICENSE b/node_modules/array-flatten/LICENSE new file mode 100644 index 00000000..983fbe8a --- /dev/null +++ b/node_modules/array-flatten/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation 
the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/array-flatten/README.md b/node_modules/array-flatten/README.md new file mode 100644 index 00000000..91fa5b63 --- /dev/null +++ b/node_modules/array-flatten/README.md @@ -0,0 +1,43 @@ +# Array Flatten + +[![NPM version][npm-image]][npm-url] +[![NPM downloads][downloads-image]][downloads-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] + +> Flatten an array of nested arrays into a single flat array. Accepts an optional depth. + +## Installation + +``` +npm install array-flatten --save +``` + +## Usage + +```javascript +var flatten = require('array-flatten') + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9]) +//=> [1, 2, 3, 4, 5, 6, 7, 8, 9] + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2) +//=> [1, 2, 3, [4, [5], 6], 7, 8, 9] + +(function () { + flatten(arguments) //=> [1, 2, 3] +})(1, [2, 3]) +``` + +## License + +MIT + +[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat +[npm-url]: https://npmjs.org/package/array-flatten +[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat +[downloads-url]: https://npmjs.org/package/array-flatten +[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat +[travis-url]: https://travis-ci.org/blakeembrey/array-flatten +[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat +[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master diff --git a/node_modules/array-flatten/array-flatten.js b/node_modules/array-flatten/array-flatten.js new file mode 100644 index 00000000..089117b3 --- /dev/null +++ b/node_modules/array-flatten/array-flatten.js @@ -0,0 +1,64 @@ +'use strict' + +/** + * Expose `arrayFlatten`. + */ +module.exports = arrayFlatten + +/** + * Recursive flatten function with depth. + * + * @param {Array} array + * @param {Array} result + * @param {Number} depth + * @return {Array} + */ +function flattenWithDepth (array, result, depth) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (depth > 0 && Array.isArray(value)) { + flattenWithDepth(value, result, depth - 1) + } else { + result.push(value) + } + } + + return result +} + +/** + * Recursive flatten function. Omitting depth is slightly faster. + * + * @param {Array} array + * @param {Array} result + * @return {Array} + */ +function flattenForever (array, result) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (Array.isArray(value)) { + flattenForever(value, result) + } else { + result.push(value) + } + } + + return result +} + +/** + * Flatten an array, with the ability to define a depth. 
+ * + * @param {Array} array + * @param {Number} depth + * @return {Array} + */ +function arrayFlatten (array, depth) { + if (depth == null) { + return flattenForever(array, []) + } + + return flattenWithDepth(array, [], depth) +} diff --git a/node_modules/array-flatten/package.json b/node_modules/array-flatten/package.json new file mode 100644 index 00000000..1a24e2a1 --- /dev/null +++ b/node_modules/array-flatten/package.json @@ -0,0 +1,39 @@ +{ + "name": "array-flatten", + "version": "1.1.1", + "description": "Flatten an array of nested arrays into a single flat array", + "main": "array-flatten.js", + "files": [ + "array-flatten.js", + "LICENSE" + ], + "scripts": { + "test": "istanbul cover _mocha -- -R spec" + }, + "repository": { + "type": "git", + "url": "git://github.com/blakeembrey/array-flatten.git" + }, + "keywords": [ + "array", + "flatten", + "arguments", + "depth" + ], + "author": { + "name": "Blake Embrey", + "email": "hello@blakeembrey.com", + "url": "http://blakeembrey.me" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/blakeembrey/array-flatten/issues" + }, + "homepage": "https://github.com/blakeembrey/array-flatten", + "devDependencies": { + "istanbul": "^0.3.13", + "mocha": "^2.2.4", + "pre-commit": "^1.0.7", + "standard": "^3.7.3" + } +} diff --git a/node_modules/array-unique/LICENSE b/node_modules/array-unique/LICENSE new file mode 100755 index 00000000..842218cf --- /dev/null +++ b/node_modules/array-unique/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/array-unique/README.md b/node_modules/array-unique/README.md new file mode 100755 index 00000000..41c8c904 --- /dev/null +++ b/node_modules/array-unique/README.md @@ -0,0 +1,77 @@ +# array-unique [![NPM version](https://img.shields.io/npm/v/array-unique.svg?style=flat)](https://www.npmjs.com/package/array-unique) [![NPM downloads](https://img.shields.io/npm/dm/array-unique.svg?style=flat)](https://npmjs.org/package/array-unique) [![Build Status](https://img.shields.io/travis/jonschlinkert/array-unique.svg?style=flat)](https://travis-ci.org/jonschlinkert/array-unique) + +Remove duplicate values from an array. Fastest ES5 implementation. 
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save array-unique +``` + +## Usage + +```js +var unique = require('array-unique'); + +var arr = ['a', 'b', 'c', 'c']; +console.log(unique(arr)) //=> ['a', 'b', 'c'] +console.log(arr) //=> ['a', 'b', 'c'] + +/* The above modifies the input array. To prevent that at a slight performance cost: */ +var unique = require("array-unique").immutable; + +var arr = ['a', 'b', 'c', 'c']; +console.log(unique(arr)) //=> ['a', 'b', 'c'] +console.log(arr) //=> ['a', 'b', 'c', 'c'] +``` + +## About + +### Related projects + +* [arr-diff](https://www.npmjs.com/package/arr-diff): Returns an array with only the unique values from the first array, by excluding all… [more](https://github.com/jonschlinkert/arr-diff) | [homepage](https://github.com/jonschlinkert/arr-diff "Returns an array with only the unique values from the first array, by excluding all values from additional arrays using strict equality for comparisons.") +* [arr-flatten](https://www.npmjs.com/package/arr-flatten): Recursively flatten an array or arrays. This is the fastest implementation of array flatten. | [homepage](https://github.com/jonschlinkert/arr-flatten "Recursively flatten an array or arrays. This is the fastest implementation of array flatten.") +* [arr-map](https://www.npmjs.com/package/arr-map): Faster, node.js focused alternative to JavaScript's native array map. | [homepage](https://github.com/jonschlinkert/arr-map "Faster, node.js focused alternative to JavaScript's native array map.") +* [arr-pluck](https://www.npmjs.com/package/arr-pluck): Retrieves the value of a specified property from all elements in the collection. | [homepage](https://github.com/jonschlinkert/arr-pluck "Retrieves the value of a specified property from all elements in the collection.") +* [arr-reduce](https://www.npmjs.com/package/arr-reduce): Fast array reduce that also loops over sparse elements. | [homepage](https://github.com/jonschlinkert/arr-reduce "Fast array reduce that also loops over sparse elements.") +* [arr-union](https://www.npmjs.com/package/arr-union): Combines a list of arrays, returning a single array with unique values, using strict equality… [more](https://github.com/jonschlinkert/arr-union) | [homepage](https://github.com/jonschlinkert/arr-union "Combines a list of arrays, returning a single array with unique values, using strict equality for comparisons.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ + +To generate the readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install -g verb verb-generate-readme && verb +``` + +### Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +### License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/array-unique/blob/master/LICENSE). 
+ +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.28, on July 31, 2016._ \ No newline at end of file diff --git a/node_modules/array-unique/index.js b/node_modules/array-unique/index.js new file mode 100644 index 00000000..7e481e07 --- /dev/null +++ b/node_modules/array-unique/index.js @@ -0,0 +1,43 @@ +/*! + * array-unique + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +module.exports = function unique(arr) { + if (!Array.isArray(arr)) { + throw new TypeError('array-unique expects an array.'); + } + + var len = arr.length; + var i = -1; + + while (i++ < len) { + var j = i + 1; + + for (; j < arr.length; ++j) { + if (arr[i] === arr[j]) { + arr.splice(j--, 1); + } + } + } + return arr; +}; + +module.exports.immutable = function uniqueImmutable(arr) { + if (!Array.isArray(arr)) { + throw new TypeError('array-unique expects an array.'); + } + + var arrLen = arr.length; + var newArr = new Array(arrLen); + + for (var i = 0; i < arrLen; i++) { + newArr[i] = arr[i]; + } + + return module.exports(newArr); +}; diff --git a/node_modules/array-unique/package.json b/node_modules/array-unique/package.json new file mode 100644 index 00000000..d87640ab --- /dev/null +++ b/node_modules/array-unique/package.json @@ -0,0 +1,62 @@ +{ + "name": "array-unique", + "description": "Remove duplicate values from an array. Fastest ES5 implementation.", + "version": "0.3.2", + "homepage": "https://github.com/jonschlinkert/array-unique", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/array-unique", + "bugs": { + "url": "https://github.com/jonschlinkert/array-unique/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "LICENSE", + "README.md" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "array-uniq": "^1.0.2", + "benchmarked": "^0.1.3", + "gulp-format-md": "^0.1.9", + "mocha": "^2.5.3", + "should": "^10.0.0" + }, + "keywords": [ + "array", + "unique" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "arr-diff", + "arr-union", + "arr-flatten", + "arr-reduce", + "arr-map", + "arr-pluck" + ] + }, + "reflinks": [ + "verb", + "verb-generate-readme" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/assign-symbols/LICENSE b/node_modules/assign-symbols/LICENSE new file mode 100644 index 00000000..65f90aca --- /dev/null +++ b/node_modules/assign-symbols/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/assign-symbols/README.md b/node_modules/assign-symbols/README.md new file mode 100644 index 00000000..422729d4 --- /dev/null +++ b/node_modules/assign-symbols/README.md @@ -0,0 +1,73 @@ +# assign-symbols [![NPM version](https://badge.fury.io/js/assign-symbols.svg)](http://badge.fury.io/js/assign-symbols) + +> Assign the enumerable es6 Symbol properties from an object (or objects) to the first object passed on the arguments. Can be used as a supplement to other extend, assign or merge methods as a polyfill for the Symbols part of the es6 Object.assign method. + +From the [Mozilla Developer docs for Symbol](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol): + +> A symbol is a unique and immutable data type and may be used as an identifier for object properties. The symbol object is an implicit object wrapper for the symbol primitive data type. + +## Install + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i assign-symbols --save +``` + +## Usage + +```js +var assignSymbols = require('assign-symbols'); +var obj = {}; + +var one = {}; +var symbolOne = Symbol('aaa'); +one[symbolOne] = 'bbb'; + +var two = {}; +var symbolTwo = Symbol('ccc'); +two[symbolTwo] = 'ddd'; + +assignSymbols(obj, one, two); + +console.log(obj[symbolOne]); +//=> 'bbb' +console.log(obj[symbolTwo]); +//=> 'ddd' +``` + +## Similar projects + +* [assign-deep](https://www.npmjs.com/package/assign-deep): Deeply assign the enumerable properties of source objects to a destination object. | [homepage](https://github.com/jonschlinkert/assign-deep) +* [clone-deep](https://www.npmjs.com/package/clone-deep): Recursively (deep) clone JavaScript native types, like Object, Array, RegExp, Date as well as primitives. | [homepage](https://github.com/jonschlinkert/clone-deep) +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow) +* [merge-deep](https://www.npmjs.com/package/merge-deep): Recursively merge values in a javascript object. | [homepage](https://github.com/jonschlinkert/merge-deep) +* [mixin-deep](https://www.npmjs.com/package/mixin-deep): Deeply mix the properties of objects into the first object. Like merge-deep, but doesn't clone. | [homepage](https://github.com/jonschlinkert/mixin-deep) + +## Running tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/assign-symbols/issues/new). + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on November 06, 2015._ \ No newline at end of file diff --git a/node_modules/assign-symbols/index.js b/node_modules/assign-symbols/index.js new file mode 100644 index 00000000..c08a232b --- /dev/null +++ b/node_modules/assign-symbols/index.js @@ -0,0 +1,40 @@ +/*! 
+ * assign-symbols + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +module.exports = function(receiver, objects) { + if (receiver === null || typeof receiver === 'undefined') { + throw new TypeError('expected first argument to be an object.'); + } + + if (typeof objects === 'undefined' || typeof Symbol === 'undefined') { + return receiver; + } + + if (typeof Object.getOwnPropertySymbols !== 'function') { + return receiver; + } + + var isEnumerable = Object.prototype.propertyIsEnumerable; + var target = Object(receiver); + var len = arguments.length, i = 0; + + while (++i < len) { + var provider = Object(arguments[i]); + var names = Object.getOwnPropertySymbols(provider); + + for (var j = 0; j < names.length; j++) { + var key = names[j]; + + if (isEnumerable.call(provider, key)) { + target[key] = provider[key]; + } + } + } + return target; +}; diff --git a/node_modules/assign-symbols/package.json b/node_modules/assign-symbols/package.json new file mode 100644 index 00000000..7f77b58d --- /dev/null +++ b/node_modules/assign-symbols/package.json @@ -0,0 +1,40 @@ +{ + "name": "assign-symbols", + "description": "Assign the enumerable es6 Symbol properties from an object (or objects) to the first object passed on the arguments. Can be used as a supplement to other extend, assign or merge methods as a polyfill for the Symbols part of the es6 Object.assign method.", + "version": "1.0.0", + "homepage": "https://github.com/jonschlinkert/assign-symbols", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/assign-symbols", + "bugs": { + "url": "https://github.com/jonschlinkert/assign-symbols/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "mocha": "^3.0.0" + }, + "keywords": [ + "assign", + "symbols" + ], + "verb": { + "related": { + "list": [ + "assign-deep", + "mixin-deep", + "merge-deep", + "extend-shallow", + "clone-deep" + ] + } + } +} diff --git a/node_modules/async-each/README.md b/node_modules/async-each/README.md new file mode 100644 index 00000000..6444d954 --- /dev/null +++ b/node_modules/async-each/README.md @@ -0,0 +1,52 @@ +# async-each + +No-bullshit, ultra-simple, 35-lines-of-code async parallel forEach function for JavaScript. + +We don't need junky 30K async libs. Really. + +For browsers and node.js. + +## Installation +* Just include async-each before your scripts. +* `npm install async-each` if you’re using node.js. + +## Usage + +* `each(array, iterator, callback);` — `Array`, `Function`, `(optional) Function` +* `iterator(item, next)` receives current item and a callback that will mark the item as done. `next` callback receives optional `error, transformedItem` arguments. +* `callback(error, transformedArray)` optionally receives first error and transformed result `Array`. 
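A minimal sketch of the `next(error, transformedItem)` contract described in the list above (the readme's own `fs.readFile` example follows below):

```javascript
var each = require('async-each');

each([1, 2, 3], function (n, next) {
  // Report either an error or a transformed value for this item;
  // the first error short-circuits the final callback.
  if (typeof n !== 'number') return next(new Error('not a number'));
  next(null, n * 2);
}, function (error, doubled) {
  if (error) return console.error(error);
  console.log(doubled); //=> [2, 4, 6]
});
```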
+ +```javascript +var each = require('async-each'); +each(['a.js', 'b.js', 'c.js'], fs.readFile, function(error, contents) { + if (error) console.error(error); + console.log('Contents for a, b and c:', contents); +}); + +// Alternatively in browser: +asyncEach(list, fn, callback); +``` + +## License + +The MIT License (MIT) + +Copyright (c) 2016 Paul Miller [(paulmillr.com)](http://paulmillr.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the “Software”), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/async-each/index.js b/node_modules/async-each/index.js new file mode 100644 index 00000000..277217df --- /dev/null +++ b/node_modules/async-each/index.js @@ -0,0 +1,38 @@ +// async-each MIT license (by Paul Miller from https://paulmillr.com). +(function(globals) { + 'use strict'; + var each = function(items, next, callback) { + if (!Array.isArray(items)) throw new TypeError('each() expects array as first argument'); + if (typeof next !== 'function') throw new TypeError('each() expects function as second argument'); + if (typeof callback !== 'function') callback = Function.prototype; // no-op + + if (items.length === 0) return callback(undefined, items); + + var transformed = new Array(items.length); + var count = 0; + var returned = false; + + items.forEach(function(item, index) { + next(item, function(error, transformedItem) { + if (returned) return; + if (error) { + returned = true; + return callback(error); + } + transformed[index] = transformedItem; + count += 1; + if (count === items.length) return callback(undefined, transformed); + }); + }); + }; + + if (typeof define !== 'undefined' && define.amd) { + define([], function() { + return each; + }); // RequireJS + } else if (typeof module !== 'undefined' && module.exports) { + module.exports = each; // CommonJS + } else { + globals.asyncEach = each; // + + +

<script>
+  }
+})(this);
+ + diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json new file mode 100644 index 00000000..71d03f82 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json @@ -0,0 +1,9 @@ +{ +"main": "index.html", +"name": "nw-pre-gyp-module-test", +"description": "Node-webkit-based module test.", +"version": "0.0.1", +"window": { + "show": false +} +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/s3_setup.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/s3_setup.js new file mode 100644 index 00000000..5bc42e96 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/s3_setup.js @@ -0,0 +1,27 @@ +"use strict"; + +module.exports = exports; + +var url = require('url'); + +var URI_REGEX="^(.*)\.(s3(?:-.*)?)\.amazonaws\.com$"; + +module.exports.detect = function(to,config) { + var uri = url.parse(to); + var hostname_matches = uri.hostname.match(URI_REGEX); + config.prefix = (!uri.pathname || uri.pathname == '/') ? '' : uri.pathname.replace('/',''); + if(!hostname_matches) { + return; + } + if (!config.bucket) { + config.bucket = hostname_matches[1]; + } + if (!config.region) { + var s3_domain = hostname_matches[2]; + if (s3_domain.slice(0,3) == 's3-' && + s3_domain.length >= 3) { + // it appears the region is explicit in the url + config.region = s3_domain.replace('s3-',''); + } + } +}; diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/versioning.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/versioning.js new file mode 100644 index 00000000..fafb0da0 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/versioning.js @@ -0,0 +1,331 @@ +"use strict"; + +module.exports = exports; + +var path = require('path'); +var semver = require('semver'); +var url = require('url'); +var detect_libc = require('detect-libc'); +var napi = require('./napi.js'); + +var abi_crosswalk; + +// This is used for unit testing to provide a fake +// ABI crosswalk that emulates one that is not updated +// for the current version +if (process.env.NODE_PRE_GYP_ABI_CROSSWALK) { + abi_crosswalk = require(process.env.NODE_PRE_GYP_ABI_CROSSWALK); +} else { + abi_crosswalk = require('./abi_crosswalk.json'); +} + +var major_versions = {}; +Object.keys(abi_crosswalk).forEach(function(v) { + var major = v.split('.')[0]; + if (!major_versions[major]) { + major_versions[major] = v; + } +}); + +function get_electron_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_electron_abi requires valid runtime arg"); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error("Empty target version is not supported if electron is the target."); + } + // Electron guarantees that patch version update won't break native modules. + var sem_ver = semver.parse(target_version); + return runtime + '-v' + sem_ver.major + '.' 
+ sem_ver.minor; +} +module.exports.get_electron_abi = get_electron_abi; + +function get_node_webkit_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_node_webkit_abi requires valid runtime arg"); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error("Empty target version is not supported if node-webkit is the target."); + } + return runtime + '-v' + target_version; +} +module.exports.get_node_webkit_abi = get_node_webkit_abi; + +function get_node_abi(runtime, versions) { + if (!runtime) { + throw new Error("get_node_abi requires valid runtime arg"); + } + if (!versions) { + throw new Error("get_node_abi requires valid process.versions object"); + } + var sem_ver = semver.parse(versions.node); + if (sem_ver.major === 0 && sem_ver.minor % 2) { // odd series + // https://github.com/mapbox/node-pre-gyp/issues/124 + return runtime+'-v'+versions.node; + } else { + // process.versions.modules added in >= v0.10.4 and v0.11.7 + // https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e + return versions.modules ? runtime+'-v' + (+versions.modules) : + 'v8-' + versions.v8.split('.').slice(0,2).join('.'); + } +} +module.exports.get_node_abi = get_node_abi; + +function get_runtime_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_runtime_abi requires valid runtime arg"); + } + if (runtime === 'node-webkit') { + return get_node_webkit_abi(runtime, target_version || process.versions['node-webkit']); + } else if (runtime === 'electron') { + return get_electron_abi(runtime, target_version || process.versions.electron); + } else { + if (runtime != 'node') { + throw new Error("Unknown Runtime: '" + runtime + "'"); + } + if (!target_version) { + return get_node_abi(runtime,process.versions); + } else { + var cross_obj; + // abi_crosswalk generated with ./scripts/abi_crosswalk.js + if (abi_crosswalk[target_version]) { + cross_obj = abi_crosswalk[target_version]; + } else { + var target_parts = target_version.split('.').map(function(i) { return +i; }); + if (target_parts.length != 3) { // parse failed + throw new Error("Unknown target version: " + target_version); + } + /* + The below code tries to infer the last known ABI compatible version + that we have recorded in the abi_crosswalk.json when an exact match + is not possible. The reasons for this to exist are complicated: + + - We support passing --target to be able to allow developers to package binaries for versions of node + that are not the same one as they are running. This might also be used in combination with the + --target_arch or --target_platform flags to also package binaries for alternative platforms + - When --target is passed we can't therefore determine the ABI (process.versions.modules) from the node + version that is running in memory + - So, therefore node-pre-gyp keeps an "ABI crosswalk" (lib/util/abi_crosswalk.json) to be able to look + this info up for all versions + - But we cannot easily predict what the future ABI will be for released versions + - And node-pre-gyp needs to be a `bundledDependency` in apps that depend on it in order to work correctly + by being fully available at install time. 
+ - So, the speed of node releases and the bundled nature of node-pre-gyp mean that a new node-pre-gyp release + need to happen for every node.js/io.js/node-webkit/nw.js/atom-shell/etc release that might come online if + you want the `--target` flag to keep working for the latest version + - Which is impractical ^^ + - Hence the below code guesses about future ABI to make the need to update node-pre-gyp less demanding. + + In practice then you can have a dependency of your app like `node-sqlite3` that bundles a `node-pre-gyp` that + only knows about node v0.10.33 in the `abi_crosswalk.json` but target node v0.10.34 (which is assumed to be + ABI compatible with v0.10.33). + + TODO: use semver module instead of custom version parsing + */ + var major = target_parts[0]; + var minor = target_parts[1]; + var patch = target_parts[2]; + // io.js: yeah if node.js ever releases 1.x this will break + // but that is unlikely to happen: https://github.com/iojs/io.js/pull/253#issuecomment-69432616 + if (major === 1) { + // look for last release that is the same major version + // e.g. we assume io.js 1.x is ABI compatible with >= 1.0.0 + while (true) { + if (minor > 0) --minor; + if (patch > 0) --patch; + var new_iojs_target = '' + major + '.' + minor + '.' + patch; + if (abi_crosswalk[new_iojs_target]) { + cross_obj = abi_crosswalk[new_iojs_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_iojs_target + ' as ABI compatible target'); + break; + } + if (minor === 0 && patch === 0) { + break; + } + } + } else if (major >= 2) { + // look for last release that is the same major version + if (major_versions[major]) { + cross_obj = abi_crosswalk[major_versions[major]]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + major_versions[major] + ' as ABI compatible target'); + } + } else if (major === 0) { // node.js + if (target_parts[1] % 2 === 0) { // for stable/even node.js series + // look for the last release that is the same minor release + // e.g. we assume node 0.10.x is ABI compatible with >= 0.10.0 + while (--patch > 0) { + var new_node_target = '' + major + '.' + minor + '.' + patch; + if (abi_crosswalk[new_node_target]) { + cross_obj = abi_crosswalk[new_node_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_node_target + ' as ABI compatible target'); + break; + } + } + } + } + } + if (!cross_obj) { + throw new Error("Unsupported target version: " + target_version); + } + // emulate process.versions + var versions_obj = { + node: target_version, + v8: cross_obj.v8+'.0', + // abi_crosswalk uses 1 for node versions lacking process.versions.modules + // process.versions.modules added in >= v0.10.4 and v0.11.7 + modules: cross_obj.node_abi > 1 ? 
cross_obj.node_abi : undefined + }; + return get_node_abi(runtime, versions_obj); + } + } +} +module.exports.get_runtime_abi = get_runtime_abi; + +var required_parameters = [ + 'module_name', + 'module_path', + 'host' +]; + +function validate_config(package_json,opts) { + var msg = package_json.name + ' package.json is not node-pre-gyp ready:\n'; + var missing = []; + if (!package_json.main) { + missing.push('main'); + } + if (!package_json.version) { + missing.push('version'); + } + if (!package_json.name) { + missing.push('name'); + } + if (!package_json.binary) { + missing.push('binary'); + } + var o = package_json.binary; + required_parameters.forEach(function(p) { + if (missing.indexOf('binary') > -1) { + missing.pop('binary'); + } + if (!o || o[p] === undefined || o[p] === "") { + missing.push('binary.' + p); + } + }); + if (missing.length >= 1) { + throw new Error(msg+"package.json must declare these properties: \n" + missing.join('\n')); + } + if (o) { + // enforce https over http + var protocol = url.parse(o.host).protocol; + if (protocol === 'http:') { + throw new Error("'host' protocol ("+protocol+") is invalid - only 'https:' is accepted"); + } + } + napi.validate_package_json(package_json,opts); +} + +module.exports.validate_config = validate_config; + +function eval_template(template,opts) { + Object.keys(opts).forEach(function(key) { + var pattern = '{'+key+'}'; + while (template.indexOf(pattern) > -1) { + template = template.replace(pattern,opts[key]); + } + }); + return template; +} + +// url.resolve needs single trailing slash +// to behave correctly, otherwise a double slash +// may end up in the url which breaks requests +// and a lacking slash may not lead to proper joining +function fix_slashes(pathname) { + if (pathname.slice(-1) != '/') { + return pathname + '/'; + } + return pathname; +} + +// remove double slashes +// note: path.normalize will not work because +// it will convert forward to back slashes +function drop_double_slashes(pathname) { + return pathname.replace(/\/\//g,'/'); +} + +function get_process_runtime(versions) { + var runtime = 'node'; + if (versions['node-webkit']) { + runtime = 'node-webkit'; + } else if (versions.electron) { + runtime = 'electron'; + } + return runtime; +} + +module.exports.get_process_runtime = get_process_runtime; + +var default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz'; +var default_remote_path = ''; + +module.exports.evaluate = function(package_json,options,napi_build_version) { + options = options || {}; + validate_config(package_json,options); // options is a suitable substitute for opts in this case + var v = package_json.version; + var module_version = semver.parse(v); + var runtime = options.runtime || get_process_runtime(process.versions); + var opts = { + name: package_json.name, + configuration: Boolean(options.debug) ? 'Debug' : 'Release', + debug: options.debug, + module_name: package_json.binary.module_name, + version: module_version.version, + prerelease: module_version.prerelease.length ? module_version.prerelease.join('.') : '', + build: module_version.build.length ? module_version.build.join('.') : '', + major: module_version.major, + minor: module_version.minor, + patch: module_version.patch, + runtime: runtime, + node_abi: get_runtime_abi(runtime,options.target), + node_abi_napi: napi.get_napi_version(options.target) ? 
'napi' : get_runtime_abi(runtime,options.target), + napi_version: napi.get_napi_version(options.target), // non-zero numeric, undefined if unsupported + napi_build_version: napi_build_version || '', + node_napi_label: napi_build_version ? 'napi-v' + napi_build_version : get_runtime_abi(runtime,options.target), + target: options.target || '', + platform: options.target_platform || process.platform, + target_platform: options.target_platform || process.platform, + arch: options.target_arch || process.arch, + target_arch: options.target_arch || process.arch, + libc: options.target_libc || detect_libc.family || 'unknown', + module_main: package_json.main, + toolset : options.toolset || '' // address https://github.com/mapbox/node-pre-gyp/issues/119 + }; + // support host mirror with npm config `--{module_name}_binary_host_mirror` + // e.g.: https://github.com/node-inspector/v8-profiler/blob/master/package.json#L25 + // > npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/ + var host = process.env['npm_config_' + opts.module_name + '_binary_host_mirror'] || package_json.binary.host; + opts.host = fix_slashes(eval_template(host,opts)); + opts.module_path = eval_template(package_json.binary.module_path,opts); + // now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably + if (options.module_root) { + // resolve relative to known module root: works for pre-binding require + opts.module_path = path.join(options.module_root,opts.module_path); + } else { + // resolve relative to current working directory: works for node-pre-gyp commands + opts.module_path = path.resolve(opts.module_path); + } + opts.module = path.join(opts.module_path,opts.module_name + '.node'); + opts.remote_path = package_json.binary.remote_path ? drop_double_slashes(fix_slashes(eval_template(package_json.binary.remote_path,opts))) : default_remote_path; + var package_name = package_json.binary.package_name ? 
package_json.binary.package_name : default_package_name; + opts.package_name = eval_template(package_name,opts); + opts.staged_tarball = path.join('build/stage',opts.remote_path,opts.package_name); + opts.hosted_path = url.resolve(opts.host,opts.remote_path); + opts.hosted_tarball = url.resolve(opts.hosted_path,opts.package_name); + return opts; +}; diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/package.json b/node_modules/fsevents/node_modules/node-pre-gyp/package.json new file mode 100644 index 00000000..a836f879 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/package.json @@ -0,0 +1,89 @@ +{ + "_args": [ + [ + "node-pre-gyp@0.12.0", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "node-pre-gyp@0.12.0", + "_id": "node-pre-gyp@0.12.0", + "_inBundle": false, + "_integrity": "sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A==", + "_location": "/node-pre-gyp", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "node-pre-gyp@0.12.0", + "name": "node-pre-gyp", + "escapedName": "node-pre-gyp", + "rawSpec": "0.12.0", + "saveSpec": null, + "fetchSpec": "0.12.0" + }, + "_requiredBy": [ + "/" + ], + "_resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz", + "_spec": "0.12.0", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Dane Springmeyer", + "email": "dane@mapbox.com" + }, + "bin": { + "node-pre-gyp": "./bin/node-pre-gyp" + }, + "bugs": { + "url": "https://github.com/mapbox/node-pre-gyp/issues" + }, + "dependencies": { + "detect-libc": "^1.0.2", + "mkdirp": "^0.5.1", + "needle": "^2.2.1", + "nopt": "^4.0.1", + "npm-packlist": "^1.1.6", + "npmlog": "^4.0.2", + "rc": "^1.2.7", + "rimraf": "^2.6.1", + "semver": "^5.3.0", + "tar": "^4" + }, + "description": "Node.js native addon binary install tool", + "devDependencies": { + "aws-sdk": "^2.28.0", + "jshint": "^2.9.5", + "nock": "^9.2.3", + "tape": "^4.6.3" + }, + "homepage": "https://github.com/mapbox/node-pre-gyp#readme", + "jshintConfig": { + "node": true, + "globalstrict": true, + "undef": true, + "unused": false, + "noarg": true + }, + "keywords": [ + "native", + "addon", + "module", + "c", + "c++", + "bindings", + "binary" + ], + "license": "BSD-3-Clause", + "main": "./lib/node-pre-gyp.js", + "name": "node-pre-gyp", + "repository": { + "type": "git", + "url": "git://github.com/mapbox/node-pre-gyp.git" + }, + "scripts": { + "pretest": "jshint test/build.test.js test/s3_setup.test.js test/versioning.test.js test/fetch.test.js lib lib/util scripts bin/node-pre-gyp", + "test": "jshint lib lib/util scripts bin/node-pre-gyp && tape test/*test.js", + "update-crosswalk": "node scripts/abi_crosswalk.js" + }, + "version": "0.12.0" +} diff --git a/node_modules/fsevents/node_modules/nopt/.npmignore b/node_modules/fsevents/node_modules/nopt/.npmignore new file mode 100644 index 00000000..3c3629e6 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/fsevents/node_modules/nopt/.travis.yml b/node_modules/fsevents/node_modules/nopt/.travis.yml new file mode 100644 index 00000000..a1cef591 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +node_js: + - '0.12' + - '4' + - '6' + - '7' +before_install: + - npm install -g npm@latest diff --git a/node_modules/fsevents/node_modules/nopt/CHANGELOG.md b/node_modules/fsevents/node_modules/nopt/CHANGELOG.md new file mode 100644 
index 00000000..82a09fb4 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/CHANGELOG.md @@ -0,0 +1,58 @@ +### v4.0.1 (2016-12-14) + +#### WHOOPS + +* [`fb9b1ce`](https://github.com/npm/nopt/commit/fb9b1ce57b3c69b4f7819015be87719204f77ef6) + Merged so many patches at once that the code fencing + ([@adius](https://github.com/adius)) added got broken. Sorry, + ([@adius](https://github.com/adius))! + ([@othiym23](https://github.com/othiym23)) + +### v4.0.0 (2016-12-13) + +#### BREAKING CHANGES + +* [`651d447`](https://github.com/npm/nopt/commit/651d4473946096d341a480bbe56793de3fc706aa) + When parsing String-typed arguments, if the next value is `""`, don't simply + swallow it. ([@samjonester](https://github.com/samjonester)) + +#### PERFORMANCE TWEAKS + +* [`3370ce8`](https://github.com/npm/nopt/commit/3370ce87a7618ba228883861db84ddbcdff252a9) + Simplify initialization. ([@elidoran](https://github.com/elidoran)) +* [`356e58e`](https://github.com/npm/nopt/commit/356e58e3b3b431a4b1af7fd7bdee44c2c0526a09) + Store `Array.isArray(types[arg])` for reuse. + ([@elidoran](https://github.com/elidoran)) +* [`0d95e90`](https://github.com/npm/nopt/commit/0d95e90515844f266015b56d2c80b94e5d14a07e) + Interpret single-item type arrays as a single type. + ([@samjonester](https://github.com/samjonester)) +* [`07c69d3`](https://github.com/npm/nopt/commit/07c69d38b5186450941fbb505550becb78a0e925) + Simplify key-value extraction. ([@elidoran](https://github.com/elidoran)) +* [`39b6e5c`](https://github.com/npm/nopt/commit/39b6e5c65ac47f60cd43a1fbeece5cd4c834c254) + Only call `Date.parse(val)` once. ([@elidoran](https://github.com/elidoran)) +* [`934943d`](https://github.com/npm/nopt/commit/934943dffecb55123a2b15959fe2a359319a5dbd) + Use `osenv.home()` to find a user's home directory instead of assuming it's + always `$HOME`. ([@othiym23](https://github.com/othiym23)) + +#### TEST & CI IMPROVEMENTS + +* [`326ffff`](https://github.com/npm/nopt/commit/326ffff7f78a00bcd316adecf69075f8a8093619) + Fix `/tmp` test to work on Windows. + ([@elidoran](https://github.com/elidoran)) +* [`c89d31a`](https://github.com/npm/nopt/commit/c89d31a49d14f2238bc6672db08da697bbc57f1b) + Only run Windows tests on Windows, only run Unix tests on a Unix. + ([@elidoran](https://github.com/elidoran)) +* [`affd3d1`](https://github.com/npm/nopt/commit/affd3d1d0addffa93006397b2013b18447339366) + Refresh Travis to run the tests against the currently-supported batch of npm + versions. ([@helio](https://github.com/helio)-frota) +* [`55f9449`](https://github.com/npm/nopt/commit/55f94497d163ed4d16dd55fd6c4fb95cc440e66d) + `tap@8.0.1` ([@othiym23](https://github.com/othiym23)) + +#### DOC TWEAKS + +* [`5271229`](https://github.com/npm/nopt/commit/5271229ee7c810217dd51616c086f5d9ab224581) + Use JavaScript code block for syntax highlighting. + ([@adius](https://github.com/adius)) +* [`c0d156f`](https://github.com/npm/nopt/commit/c0d156f229f9994c5dfcec4a8886eceff7a07682) + The code sample in the README had `many2: [ oneThing ]`, and now it has + `many2: [ two, things ]`. ([@silkentrance](https://github.com/silkentrance)) diff --git a/node_modules/fsevents/node_modules/nopt/LICENSE b/node_modules/fsevents/node_modules/nopt/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/nopt/README.md b/node_modules/fsevents/node_modules/nopt/README.md new file mode 100644 index 00000000..a99531c0 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/README.md @@ -0,0 +1,213 @@ +If you want to write an option parser, and have it be good, there are +two ways to do it. The Right Way, and the Wrong Way. + +The Wrong Way is to sit down and write an option parser. We've all done +that. + +The Right Way is to write some complex configurable program with so many +options that you hit the limit of your frustration just trying to +manage them all, and defer it with duct-tape solutions until you see +exactly to the core of the problem, and finally snap and write an +awesome option parser. + +If you want to write an option parser, don't write an option parser. +Write a package manager, or a source control system, or a service +restarter, or an operating system. You probably won't end up with a +good one of those, but if you don't give up, and you are relentless and +diligent enough in your procrastination, you may just end up with a very +nice option parser. + +## USAGE + +```javascript +// my-program.js +var nopt = require("nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + , "many1" : [String, Array] + , "many2" : [path, Array] + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag"] + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) +console.log(parsed) +``` + +This would give you support for any of the following: + +```console +$ node my-program.js --foo "blerp" --no-flag +{ "foo" : "blerp", "flag" : false } + +$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag +{ bar: 7, foo: "Mr. Hand", flag: true } + +$ node my-program.js --foo "blerp" -f -----p +{ foo: "blerp", flag: true, pick: true } + +$ node my-program.js -fp --foofoo +{ foo: "Mr. Foo", flag: true, pick: true } + +$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. +{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } + +$ node my-program.js --blatzk -fp # unknown opts are ok. 
+{ blatzk: true, flag: true, pick: true } + +$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value +{ blatzk: 1000, flag: true, pick: true } + +$ node my-program.js --no-blatzk -fp # unless they start with "no-" +{ blatzk: false, flag: true, pick: true } + +$ node my-program.js --baz b/a/z # known paths are resolved. +{ baz: "/Users/isaacs/b/a/z" } + +# if Array is one of the types, then it can take many +# values, and will always be an array. The other types provided +# specify what types are allowed in the list. + +$ node my-program.js --many1 5 --many1 null --many1 foo +{ many1: ["5", "null", "foo"] } + +$ node my-program.js --many2 foo --many2 bar +{ many2: ["/path/to/foo", "path/to/bar"] } +``` + +Read the tests at the bottom of `lib/nopt.js` for more examples of +what this puppy can do. + +## Types + +The following types are supported, and defined on `nopt.typeDefs` + +* String: A normal string. No parsing is done. +* path: A file system path. Gets resolved against cwd if not absolute. +* url: A url. If it doesn't parse, it isn't accepted. +* Number: Must be numeric. +* Date: Must parse as a date. If it does, and `Date` is one of the options, + then it will return a Date object, not a string. +* Boolean: Must be either `true` or `false`. If an option is a boolean, + then it does not need a value, and its presence will imply `true` as + the value. To negate boolean flags, do `--no-whatever` or `--whatever + false` +* NaN: Means that the option is strictly not allowed. Any value will + fail. +* Stream: An object matching the "Stream" class in node. Valuable + for use when validating programmatically. (npm uses this to let you + supply any WriteStream on the `outfd` and `logfd` config options.) +* Array: If `Array` is specified as one of the types, then the value + will be parsed as a list of options. This means that multiple values + can be specified, and that the value will always be an array. + +If a type is an array of values not on this list, then those are +considered valid values. For instance, in the example above, the +`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, +and any other value will be rejected. + +When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be +interpreted as their JavaScript equivalents. + +You can also mix types and values, or multiple types, in a list. For +instance `{ blah: [Number, null] }` would allow a value to be set to +either a Number or null. When types are ordered, this implies a +preference, and the first type that can be used to properly interpret +the value will be used. + +To define a new type, add it to `nopt.typeDefs`. Each item in that +hash is an object with a `type` member and a `validate` method. The +`type` member is an object that matches what goes in the type list. The +`validate` method is a function that gets called with `validate(data, +key, val)`. Validate methods should assign `data[key]` to the valid +value of `val` if it can be handled properly, or return boolean +`false` if it cannot. + +You can also call `nopt.clean(data, types, typeDefs)` to clean up a +config object and remove its invalid properties. + +## Error Handling + +By default, nopt outputs a warning to standard error when invalid values for +known options are found. You can change this behavior by assigning a method +to `nopt.invalidHandler`. This method will be called with +the offending `nopt.invalidHandler(key, val, types)`. 
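For example, a sketch of replacing the default warning with a custom handler:

```javascript
var nopt = require("nopt")

// Called once per invalid value; the default console.error warning
// is replaced by whatever you do here.
nopt.invalidHandler = function (key, val, types) {
  console.error("ignoring bad value %j for --%s (expected %j)", val, key, types)
}
```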
+ +If no `nopt.invalidHandler` is assigned, then it will console.error +its whining. If it is assigned to boolean `false` then the warning is +suppressed. + +## Abbreviations + +Yes, they are supported. If you define options like this: + +```javascript +{ "foolhardyelephants" : Boolean +, "pileofmonkeys" : Boolean } +``` + +Then this will work: + +```bash +node program.js --foolhar --pil +node program.js --no-f --pileofmon +# etc. +``` + +## Shorthands + +Shorthands are a hash of shorter option names to a snippet of args that +they expand to. + +If multiple one-character shorthands are all combined, and the +combination does not unambiguously match any other option or shorthand, +then they will be broken up into their constituent parts. For example: + +```json +{ "s" : ["--loglevel", "silent"] +, "g" : "--global" +, "f" : "--force" +, "p" : "--parseable" +, "l" : "--long" +} +``` + +```bash +npm ls -sgflp +# just like doing this: +npm ls --loglevel silent --global --force --long --parseable +``` + +## The Rest of the args + +The config object returned by nopt is given a special member called +`argv`, which is an object with the following fields: + +* `remain`: The remaining args after all the parsing has occurred. +* `original`: The args as they originally appeared. +* `cooked`: The args after flags and shorthands are expanded. + +## Slicing + +Node programs are called with more or less the exact argv as it appears +in C land, after the v8 and node-specific options have been plucked off. +As such, `argv[0]` is always `node` and `argv[1]` is always the +JavaScript program being run. + +That's usually not very useful to you. So they're sliced off by +default. If you want them, then you can pass in `0` as the last +argument, or any other number that you'd like to slice off the start of +the list. 
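To make the custom-type mechanism from the Types section above concrete, here is a small sketch; the `LowerCase` type is invented purely for illustration:

```javascript
var nopt = require("nopt")

// A made-up type: validate() either assigns a cleaned value to data[key]
// or returns false to reject the value.
function LowerCase () {}
nopt.typeDefs.LowerCase = {
  type: LowerCase,
  validate: function (data, key, val) {
    if (String(val) !== String(val).toLowerCase()) return false
    data[key] = String(val)
    return true
  }
}

var parsed = nopt({ tag: LowerCase }, {}, ["--tag", "beta"], 0)
console.log(parsed.tag) //=> "beta"
```

Passing `--tag BETA` instead would be rejected (`validate` returns false), so `parsed.tag` would end up undefined.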
diff --git a/node_modules/fsevents/node_modules/nopt/bin/nopt.js b/node_modules/fsevents/node_modules/nopt/bin/nopt.js new file mode 100755 index 00000000..3232d4c5 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/bin/nopt.js @@ -0,0 +1,54 @@ +#!/usr/bin/env node +var nopt = require("../lib/nopt") + , path = require("path") + , types = { num: Number + , bool: Boolean + , help: Boolean + , list: Array + , "num-list": [Number, Array] + , "str-list": [String, Array] + , "bool-list": [Boolean, Array] + , str: String + , clear: Boolean + , config: Boolean + , length: Number + , file: path + } + , shorthands = { s: [ "--str", "astring" ] + , b: [ "--bool" ] + , nb: [ "--no-bool" ] + , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ] + , "?": ["--help"] + , h: ["--help"] + , H: ["--help"] + , n: [ "--num", "125" ] + , c: ["--config"] + , l: ["--length"] + , f: ["--file"] + } + , parsed = nopt( types + , shorthands + , process.argv + , 2 ) + +console.log("parsed", parsed) + +if (parsed.help) { + console.log("") + console.log("nopt cli tester") + console.log("") + console.log("types") + console.log(Object.keys(types).map(function M (t) { + var type = types[t] + if (Array.isArray(type)) { + return [t, type.map(function (type) { return type.name })] + } + return [t, type && type.name] + }).reduce(function (s, i) { + s[i[0]] = i[1] + return s + }, {})) + console.log("") + console.log("shorthands") + console.log(shorthands) +} diff --git a/node_modules/fsevents/node_modules/nopt/examples/my-program.js b/node_modules/fsevents/node_modules/nopt/examples/my-program.js new file mode 100755 index 00000000..142447e1 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/examples/my-program.js @@ -0,0 +1,30 @@ +#!/usr/bin/env node + +//process.env.DEBUG_NOPT = 1 + +// my-program.js +var nopt = require("../lib/nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag", "true"] + , "g" : ["--flag"] + , "s" : "--flag" + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) + +console.log("parsed =\n"+ require("util").inspect(parsed)) diff --git a/node_modules/fsevents/node_modules/nopt/lib/nopt.js b/node_modules/fsevents/node_modules/nopt/lib/nopt.js new file mode 100644 index 00000000..1fb11351 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/lib/nopt.js @@ -0,0 +1,436 @@ +// info about each config option. + +var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG + ? 
function () { console.error.apply(console, arguments) } + : function () {} + +var url = require("url") + , path = require("path") + , Stream = require("stream").Stream + , abbrev = require("abbrev") + , osenv = require("osenv") + +module.exports = exports = nopt +exports.clean = clean + +exports.typeDefs = + { String : { type: String, validate: validateString } + , Boolean : { type: Boolean, validate: validateBoolean } + , url : { type: url, validate: validateUrl } + , Number : { type: Number, validate: validateNumber } + , path : { type: path, validate: validatePath } + , Stream : { type: Stream, validate: validateStream } + , Date : { type: Date, validate: validateDate } + } + +function nopt (types, shorthands, args, slice) { + args = args || process.argv + types = types || {} + shorthands = shorthands || {} + if (typeof slice !== "number") slice = 2 + + debug(types, shorthands, args, slice) + + args = args.slice(slice) + var data = {} + , key + , argv = { + remain: [], + cooked: args, + original: args.slice(0) + } + + parse(args, data, argv.remain, types, shorthands) + // now data is full + clean(data, types, exports.typeDefs) + data.argv = argv + Object.defineProperty(data.argv, 'toString', { value: function () { + return this.original.map(JSON.stringify).join(" ") + }, enumerable: false }) + return data +} + +function clean (data, types, typeDefs) { + typeDefs = typeDefs || exports.typeDefs + var remove = {} + , typeDefault = [false, true, null, String, Array] + + Object.keys(data).forEach(function (k) { + if (k === "argv") return + var val = data[k] + , isArray = Array.isArray(val) + , type = types[k] + if (!isArray) val = [val] + if (!type) type = typeDefault + if (type === Array) type = typeDefault.concat(Array) + if (!Array.isArray(type)) type = [type] + + debug("val=%j", val) + debug("types=", type) + val = val.map(function (val) { + // if it's an unknown value, then parse false/true/null/numbers/dates + if (typeof val === "string") { + debug("string %j", val) + val = val.trim() + if ((val === "null" && ~type.indexOf(null)) + || (val === "true" && + (~type.indexOf(true) || ~type.indexOf(Boolean))) + || (val === "false" && + (~type.indexOf(false) || ~type.indexOf(Boolean)))) { + val = JSON.parse(val) + debug("jsonable %j", val) + } else if (~type.indexOf(Number) && !isNaN(val)) { + debug("convert to number", val) + val = +val + } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { + debug("convert to date", val) + val = new Date(val) + } + } + + if (!types.hasOwnProperty(k)) { + return val + } + + // allow `--no-blah` to set 'blah' to null if null is allowed + if (val === false && ~type.indexOf(null) && + !(~type.indexOf(false) || ~type.indexOf(Boolean))) { + val = null + } + + var d = {} + d[k] = val + debug("prevalidated val", d, val, types[k]) + if (!validate(d, k, val, types[k], typeDefs)) { + if (exports.invalidHandler) { + exports.invalidHandler(k, val, types[k], data) + } else if (exports.invalidHandler !== false) { + debug("invalid: "+k+"="+val, types[k]) + } + return remove + } + debug("validated val", d, val, types[k]) + return d[k] + }).filter(function (val) { return val !== remove }) + + if (!val.length) delete data[k] + else if (isArray) { + debug(isArray, data[k], val) + data[k] = val + } else data[k] = val[0] + + debug("k=%s val=%j", k, val, data[k]) + }) +} + +function validateString (data, k, val) { + data[k] = String(val) +} + +function validatePath (data, k, val) { + if (val === true) return false + if (val === null) return true + + val = String(val) + + 
var isWin = process.platform === 'win32' + , homePattern = isWin ? /^~(\/|\\)/ : /^~\// + , home = osenv.home() + + if (home && val.match(homePattern)) { + data[k] = path.resolve(home, val.substr(2)) + } else { + data[k] = path.resolve(val) + } + return true +} + +function validateNumber (data, k, val) { + debug("validate Number %j %j %j", k, val, isNaN(val)) + if (isNaN(val)) return false + data[k] = +val +} + +function validateDate (data, k, val) { + var s = Date.parse(val) + debug("validate Date %j %j %j", k, val, s) + if (isNaN(s)) return false + data[k] = new Date(val) +} + +function validateBoolean (data, k, val) { + if (val instanceof Boolean) val = val.valueOf() + else if (typeof val === "string") { + if (!isNaN(val)) val = !!(+val) + else if (val === "null" || val === "false") val = false + else val = true + } else val = !!val + data[k] = val +} + +function validateUrl (data, k, val) { + val = url.parse(String(val)) + if (!val.host) return false + data[k] = val.href +} + +function validateStream (data, k, val) { + if (!(val instanceof Stream)) return false + data[k] = val +} + +function validate (data, k, val, type, typeDefs) { + // arrays are lists of types. + if (Array.isArray(type)) { + for (var i = 0, l = type.length; i < l; i ++) { + if (type[i] === Array) continue + if (validate(data, k, val, type[i], typeDefs)) return true + } + delete data[k] + return false + } + + // an array of anything? + if (type === Array) return true + + // NaN is poisonous. Means that something is not allowed. + if (type !== type) { + debug("Poison NaN", k, val, type) + delete data[k] + return false + } + + // explicit list of values + if (val === type) { + debug("Explicitly allowed %j", val) + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + return true + } + + // now go through the list of typeDefs, validate against each one. + var ok = false + , types = Object.keys(typeDefs) + for (var i = 0, l = types.length; i < l; i ++) { + debug("test type %j %j %j", k, val, types[i]) + var t = typeDefs[types[i]] + if (t && + ((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) { + var d = {} + ok = false !== t.validate(d, k, val) + val = d[k] + if (ok) { + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + break + } + } + } + debug("OK? %j (%j %j %j)", ok, k, val, types[i]) + + if (!ok) delete data[k] + return ok +} + +function parse (args, data, remain, types, shorthands) { + debug("parse", args, data, remain) + + var key = null + , abbrevs = abbrev(Object.keys(types)) + , shortAbbr = abbrev(Object.keys(shorthands)) + + for (var i = 0; i < args.length; i ++) { + var arg = args[i] + debug("arg", arg) + + if (arg.match(/^-{2,}$/)) { + // done with keys. + // the rest are args. + remain.push.apply(remain, args.slice(i + 1)) + args[i] = "--" + break + } + var hadEq = false + if (arg.charAt(0) === "-" && arg.length > 1) { + var at = arg.indexOf('=') + if (at > -1) { + hadEq = true + var v = arg.substr(at + 1) + arg = arg.substr(0, at) + args.splice(i, 1, arg, v) + } + + // see if it's a shorthand + // if so, splice and back up to re-parse it. 
+ var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) + debug("arg=%j shRes=%j", arg, shRes) + if (shRes) { + debug(arg, shRes) + args.splice.apply(args, [i, 1].concat(shRes)) + if (arg !== shRes[0]) { + i -- + continue + } + } + arg = arg.replace(/^-+/, "") + var no = null + while (arg.toLowerCase().indexOf("no-") === 0) { + no = !no + arg = arg.substr(3) + } + + if (abbrevs[arg]) arg = abbrevs[arg] + + var argType = types[arg] + var isTypeArray = Array.isArray(argType) + if (isTypeArray && argType.length === 1) { + isTypeArray = false + argType = argType[0] + } + + var isArray = argType === Array || + isTypeArray && argType.indexOf(Array) !== -1 + + // allow unknown things to be arrays if specified multiple times. + if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) { + if (!Array.isArray(data[arg])) + data[arg] = [data[arg]] + isArray = true + } + + var val + , la = args[i + 1] + + var isBool = typeof no === 'boolean' || + argType === Boolean || + isTypeArray && argType.indexOf(Boolean) !== -1 || + (typeof argType === 'undefined' && !hadEq) || + (la === "false" && + (argType === null || + isTypeArray && ~argType.indexOf(null))) + + if (isBool) { + // just set and move along + val = !no + // however, also support --bool true or --bool false + if (la === "true" || la === "false") { + val = JSON.parse(la) + la = null + if (no) val = !val + i ++ + } + + // also support "foo":[Boolean, "bar"] and "--foo bar" + if (isTypeArray && la) { + if (~argType.indexOf(la)) { + // an explicit type + val = la + i ++ + } else if ( la === "null" && ~argType.indexOf(null) ) { + // null allowed + val = null + i ++ + } else if ( !la.match(/^-{2,}[^-]/) && + !isNaN(la) && + ~argType.indexOf(Number) ) { + // number + val = +la + i ++ + } else if ( !la.match(/^-[^-]/) && ~argType.indexOf(String) ) { + // string + val = la + i ++ + } + } + + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + continue + } + + if (argType === String) { + if (la === undefined) { + la = "" + } else if (la.match(/^-{1,2}[^-]+/)) { + la = "" + i -- + } + } + + if (la && la.match(/^-{2,}$/)) { + la = undefined + i -- + } + + val = la === undefined ? true : la + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + i ++ + continue + } + remain.push(arg) + } +} + +function resolveShort (arg, shorthands, shortAbbr, abbrevs) { + // handle single-char shorthands glommed together, like + // npm ls -glp, but only if there is one dash, and only if + // all of the chars are single-char shorthands, and it's + // not a match to some other abbrev. 
+ arg = arg.replace(/^-+/, '') + + // if it's an exact known option, then don't go any further + if (abbrevs[arg] === arg) + return null + + // if it's an exact known shortopt, same deal + if (shorthands[arg]) { + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] + } + + // first check to see if this arg is a set of single-char shorthands + var singles = shorthands.___singles + if (!singles) { + singles = Object.keys(shorthands).filter(function (s) { + return s.length === 1 + }).reduce(function (l,r) { + l[r] = true + return l + }, {}) + shorthands.___singles = singles + debug('shorthand singles', singles) + } + + var chrs = arg.split("").filter(function (c) { + return singles[c] + }) + + if (chrs.join("") === arg) return chrs.map(function (c) { + return shorthands[c] + }).reduce(function (l, r) { + return l.concat(r) + }, []) + + + // if it's an arg abbrev, and not a literal shorthand, then prefer the arg + if (abbrevs[arg] && !shorthands[arg]) + return null + + // if it's an abbr for a shorthand, then use that + if (shortAbbr[arg]) + arg = shortAbbr[arg] + + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] +} diff --git a/node_modules/fsevents/node_modules/nopt/package.json b/node_modules/fsevents/node_modules/nopt/package.json new file mode 100644 index 00000000..7e1c23ef --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/package.json @@ -0,0 +1,61 @@ +{ + "_args": [ + [ + "nopt@4.0.1", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "nopt@4.0.1", + "_id": "nopt@4.0.1", + "_inBundle": false, + "_integrity": "sha1-0NRoWv1UFRk8jHUFYC0NF81kR00=", + "_location": "/nopt", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "nopt@4.0.1", + "name": "nopt", + "escapedName": "nopt", + "rawSpec": "4.0.1", + "saveSpec": null, + "fetchSpec": "4.0.1" + }, + "_requiredBy": [ + "/node-pre-gyp" + ], + "_resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.1.tgz", + "_spec": "4.0.1", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bin": { + "nopt": "./bin/nopt.js" + }, + "bugs": { + "url": "https://github.com/npm/nopt/issues" + }, + "dependencies": { + "abbrev": "1", + "osenv": "^0.1.4" + }, + "description": "Option parsing for Node, supporting types, shorthands, etc. 
Used by npm.", + "devDependencies": { + "tap": "^8.0.1" + }, + "homepage": "https://github.com/npm/nopt#readme", + "license": "ISC", + "main": "lib/nopt.js", + "name": "nopt", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/nopt.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "version": "4.0.1" +} diff --git a/node_modules/fsevents/node_modules/nopt/test/basic.js b/node_modules/fsevents/node_modules/nopt/test/basic.js new file mode 100644 index 00000000..5c18ac0f --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/test/basic.js @@ -0,0 +1,303 @@ +var nopt = require("../") + , test = require('tap').test + , isWin = process.platform === 'win32' + +test("passing a string results in a string", function (t) { + var parsed = nopt({ key: String }, {}, ["--key", "myvalue"], 0) + t.same(parsed.key, "myvalue") + t.end() +}) + +// https://github.com/npm/nopt/issues/31 +test("Empty String results in empty string, not true", function (t) { + var parsed = nopt({ empty: String }, {}, ["--empty"], 0) + t.same(parsed.empty, "") + t.end() +}) + +// https://github.com/npm/nopt/issues/65 +test("Empty String should not swallow next flag", function (t) { + var parsed = nopt({ empty: String, foo: String }, {}, ["--empty", "--foo"], 0) + t.same(parsed.empty, "") + t.same(parsed.foo, "") + t.end() +}) + +// https://github.com/npm/nopt/issues/66 +test("Empty String should not be true when type is single item Array", function (t) { + var parsed = nopt({ 'foo': [String] }, {}, ["--foo"], 0) + t.same(parsed.foo, "") + t.end() +}) + +test("~ path is resolved to " + (isWin ? '%USERPROFILE%' : '$HOME'), function (t) { + var path = require("path") + , the + + if (isWin) { + the = { + key: 'USERPROFILE', + dir: 'C:\\temp', + val: '~\\val' + } + } else { + the = { + key: 'HOME', + dir: '/tmp', + val: '~/val' + } + } + if (!process.env[the.key]) process.env[the.key] = v.dir + var parsed = nopt({key: path}, {}, ["--key=" + the.val], 0) + t.same(parsed.key, path.resolve(process.env[the.key], "val")) + t.end() +}) + +// https://github.com/npm/nopt/issues/24 +test("Unknown options are not parsed as numbers", function (t) { + var parsed = nopt({"parse-me": Number}, null, ['--leave-as-is=1.20', '--parse-me=1.20'], 0) + t.equal(parsed['leave-as-is'], '1.20') + t.equal(parsed['parse-me'], 1.2) + t.end() +}); + +// https://github.com/npm/nopt/issues/48 +test("Check types based on name of type", function (t) { + var parsed = nopt({"parse-me": {name: "Number"}}, null, ['--parse-me=1.20'], 0) + t.equal(parsed['parse-me'], 1.2) + t.end() +}) + + +test("Missing types are not parsed", function (t) { + var parsed = nopt({"parse-me": {}}, null, ['--parse-me=1.20'], 0) + //should only contain argv + t.equal(Object.keys(parsed).length, 1) + t.end() +}) + +test("Types passed without a name are not parsed", function (t) { + var parsed = nopt({"parse-me": {}}, {}, ['--parse-me=1.20'], 0) + //should only contain argv + t.equal(Object.keys(parsed).length, 1) + t.end() +}) + +test("other tests", function (t) { + + var util = require("util") + , Stream = require("stream") + , path = require("path") + , url = require("url") + + , shorthands = + { s : ["--loglevel", "silent"] + , d : ["--loglevel", "info"] + , dd : ["--loglevel", "verbose"] + , ddd : ["--loglevel", "silly"] + , noreg : ["--no-registry"] + , reg : ["--registry"] + , "no-reg" : ["--no-registry"] + , silent : ["--loglevel", "silent"] + , verbose : ["--loglevel", "verbose"] + , h : ["--usage"] + , H : ["--usage"] + , "?" 
: ["--usage"] + , help : ["--usage"] + , v : ["--version"] + , f : ["--force"] + , desc : ["--description"] + , "no-desc" : ["--no-description"] + , "local" : ["--no-global"] + , l : ["--long"] + , p : ["--parseable"] + , porcelain : ["--parseable"] + , g : ["--global"] + } + + , types = + { aoa: Array + , nullstream: [null, Stream] + , date: Date + , str: String + , browser : String + , cache : path + , color : ["always", Boolean] + , depth : Number + , description : Boolean + , dev : Boolean + , editor : path + , force : Boolean + , global : Boolean + , globalconfig : path + , group : [String, Number] + , gzipbin : String + , logfd : [Number, Stream] + , loglevel : ["silent","win","error","warn","info","verbose","silly"] + , long : Boolean + , "node-version" : [false, String] + , npaturl : url + , npat : Boolean + , "onload-script" : [false, String] + , outfd : [Number, Stream] + , parseable : Boolean + , pre: Boolean + , prefix: path + , proxy : url + , "rebuild-bundle" : Boolean + , registry : url + , searchopts : String + , searchexclude: [null, String] + , shell : path + , t: [Array, String] + , tag : String + , tar : String + , tmp : path + , "unsafe-perm" : Boolean + , usage : Boolean + , user : String + , username : String + , userconfig : path + , version : Boolean + , viewer: path + , _exit : Boolean + , path: path + } + + ; [["-v", {version:true}, []] + ,["---v", {version:true}, []] + ,["ls -s --no-reg connect -d", + {loglevel:"info",registry:null},["ls","connect"]] + ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]] + ,["ls --registry blargle", {}, ["ls"]] + ,["--no-registry", {registry:null}, []] + ,["--no-color true", {color:false}, []] + ,["--no-color false", {color:true}, []] + ,["--no-color", {color:false}, []] + ,["--color false", {color:false}, []] + ,["--color --logfd 7", {logfd:7,color:true}, []] + ,["--color=true", {color:true}, []] + ,["--logfd=10", {logfd:10}, []] + ,["--tmp=/tmp -tar=gtar", {tmp: isWin ? 
"C:\\tmp" : "/tmp",tar:"gtar"},[]] + ,["--tmp=tmp -tar=gtar", + {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]] + ,["--logfd x", {}, []] + ,["a -true -- -no-false", {true:true},["a","-no-false"]] + ,["a -no-false", {false:false},["a"]] + ,["a -no-no-true", {true:true}, ["a"]] + ,["a -no-no-no-false", {false:false}, ["a"]] + ,["---NO-no-No-no-no-no-nO-no-no"+ + "-No-no-no-no-no-no-no-no-no"+ + "-no-no-no-no-NO-NO-no-no-no-no-no-no"+ + "-no-body-can-do-the-boogaloo-like-I-do" + ,{"body-can-do-the-boogaloo-like-I-do":false}, []] + ,["we are -no-strangers-to-love "+ + "--you-know=the-rules --and=so-do-i "+ + "---im-thinking-of=a-full-commitment "+ + "--no-you-would-get-this-from-any-other-guy "+ + "--no-gonna-give-you-up "+ + "-no-gonna-let-you-down=true "+ + "--no-no-gonna-run-around false "+ + "--desert-you=false "+ + "--make-you-cry false "+ + "--no-tell-a-lie "+ + "--no-no-and-hurt-you false" + ,{"strangers-to-love":false + ,"you-know":"the-rules" + ,"and":"so-do-i" + ,"you-would-get-this-from-any-other-guy":false + ,"gonna-give-you-up":false + ,"gonna-let-you-down":false + ,"gonna-run-around":false + ,"desert-you":false + ,"make-you-cry":false + ,"tell-a-lie":false + ,"and-hurt-you":false + },["we", "are"]] + ,["-t one -t two -t three" + ,{t: ["one", "two", "three"]} + ,[]] + ,["-t one -t null -t three four five null" + ,{t: ["one", "null", "three"]} + ,["four", "five", "null"]] + ,["-t foo" + ,{t:["foo"]} + ,[]] + ,["--no-t" + ,{t:["false"]} + ,[]] + ,["-no-no-t" + ,{t:["true"]} + ,[]] + ,["-aoa one -aoa null -aoa 100" + ,{aoa:["one", null, '100']} + ,[]] + ,["-str 100" + ,{str:"100"} + ,[]] + ,["--color always" + ,{color:"always"} + ,[]] + ,["--no-nullstream" + ,{nullstream:null} + ,[]] + ,["--nullstream false" + ,{nullstream:null} + ,[]] + ,["--notadate=2011-01-25" + ,{notadate: "2011-01-25"} + ,[]] + ,["--date 2011-01-25" + ,{date: new Date("2011-01-25")} + ,[]] + ,["-cl 1" + ,{config: true, length: 1} + ,[] + ,{config: Boolean, length: Number, clear: Boolean} + ,{c: "--config", l: "--length"}] + ,["--acount bla" + ,{"acount":true} + ,["bla"] + ,{account: Boolean, credentials: Boolean, options: String} + ,{a:"--account", c:"--credentials",o:"--options"}] + ,["--clear" + ,{clear:true} + ,[] + ,{clear:Boolean,con:Boolean,len:Boolean,exp:Boolean,add:Boolean,rep:Boolean} + ,{c:"--con",l:"--len",e:"--exp",a:"--add",r:"--rep"}] + ,["--file -" + ,{"file":"-"} + ,[] + ,{file:String} + ,{}] + ,["--file -" + ,{"file":true} + ,["-"] + ,{file:Boolean} + ,{}] + ,["--path" + ,{"path":null} + ,[]] + ,["--path ." + ,{"path":process.cwd()} + ,[]] + ].forEach(function (test) { + var argv = test[0].split(/\s+/) + , opts = test[1] + , rem = test[2] + , actual = nopt(test[3] || types, test[4] || shorthands, argv, 0) + , parsed = actual.argv + delete actual.argv + for (var i in opts) { + var e = JSON.stringify(opts[i]) + , a = JSON.stringify(actual[i] === undefined ? null : actual[i]) + if (e && typeof e === "object") { + t.deepEqual(e, a) + } else { + t.equal(e, a) + } + } + t.deepEqual(rem, parsed.remain) + }) + t.end() +}) diff --git a/node_modules/fsevents/node_modules/npm-bundled/LICENSE b/node_modules/fsevents/node_modules/npm-bundled/LICENSE new file mode 100644 index 00000000..20a47625 --- /dev/null +++ b/node_modules/fsevents/node_modules/npm-bundled/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. 
and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/npm-bundled/README.md b/node_modules/fsevents/node_modules/npm-bundled/README.md new file mode 100644 index 00000000..fcfb2322 --- /dev/null +++ b/node_modules/fsevents/node_modules/npm-bundled/README.md @@ -0,0 +1,48 @@ +# npm-bundled + +Run this in a node package, and it'll tell you which things in +node_modules are bundledDependencies, or transitive dependencies of +bundled dependencies. + +[![Build Status](https://travis-ci.org/npm/npm-bundled.svg?branch=master)](https://travis-ci.org/npm/npm-bundled) + +## USAGE + +To get the list of deps at the top level that are bundled (or +transitive deps of a bundled dep) run this: + +```js +const bundled = require('npm-bundled') + +// async version +bundled({ path: '/path/to/pkg/defaults/to/cwd'}, (er, list) => { + // er means it had an error, which is _hella_ weird + // list is a list of package names, like `fooblz` or `@corp/blerg` + // the might not all be deps of the top level, because transitives +}) + +// async promise version +bundled({ path: '/path/to/pkg/defaults/to/cwd'}).then(list => { + // so promisey! + // actually the callback version returns a promise, too, it just + // attaches the supplied callback to the promise +}) + +// sync version, throws if there's an error +const list = bundled({ path: '/path/to/pkg/defaults/to/cwd'}) +``` + +That's basically all you need to know. If you care to dig into it, +you can also use the `bundled.Walker` and `bundled.WalkerSync` +classes to get fancy. + +This library does not write anything to the filesystem, but it _may_ +have undefined behavior if the structure of `node_modules` changes +while it's reading deps. + +All symlinks are followed. This means that it can lead to surprising +results if a symlinked bundled dependency has a missing dependency +that is satisfied at the top level. Since package creation resolves +symlinks as well, this is an edge case where package creation and +development environment are not going to be aligned, and is best +avoided. diff --git a/node_modules/fsevents/node_modules/npm-bundled/index.js b/node_modules/fsevents/node_modules/npm-bundled/index.js new file mode 100644 index 00000000..bde0acd1 --- /dev/null +++ b/node_modules/fsevents/node_modules/npm-bundled/index.js @@ -0,0 +1,241 @@ +'use strict' + +// walk the tree of deps starting from the top level list of bundled deps +// Any deps at the top level that are depended on by a bundled dep that +// does not have that dep in its own node_modules folder are considered +// bundled deps as well. This list of names can be passed to npm-packlist +// as the "bundled" argument. 
Additionally, packageJsonCache is shared so +// packlist doesn't have to re-read files already consumed in this pass + +const fs = require('fs') +const path = require('path') +const EE = require('events').EventEmitter + +class BundleWalker extends EE { + constructor (opt) { + opt = opt || {} + super(opt) + this.path = path.resolve(opt.path || process.cwd()) + + this.parent = opt.parent || null + if (this.parent) { + this.result = this.parent.result + // only collect results in node_modules folders at the top level + // since the node_modules in a bundled dep is included always + if (!this.parent.parent) { + const base = path.basename(this.path) + const scope = path.basename(path.dirname(this.path)) + this.result.add(/^@/.test(scope) ? scope + '/' + base : base) + } + this.root = this.parent.root + this.packageJsonCache = this.parent.packageJsonCache + } else { + this.result = new Set() + this.root = this.path + this.packageJsonCache = opt.packageJsonCache || new Map() + } + + this.seen = new Set() + this.didDone = false + this.children = 0 + this.node_modules = [] + this.package = null + this.bundle = null + } + + addListener (ev, fn) { + return this.on(ev, fn) + } + + on (ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'done' && this.didDone) { + this.emit('done', this.result) + } + return ret + } + + done () { + if (!this.didDone) { + this.didDone = true + if (!this.parent) { + const res = Array.from(this.result) + this.result = res + this.emit('done', res) + } else { + this.emit('done') + } + } + } + + start () { + const pj = path.resolve(this.path, 'package.json') + if (this.packageJsonCache.has(pj)) + this.onPackage(this.packageJsonCache.get(pj)) + else + this.readPackageJson(pj) + return this + } + + readPackageJson (pj) { + fs.readFile(pj, (er, data) => + er ? this.done() : this.onPackageJson(pj, data)) + } + + onPackageJson (pj, data) { + try { + this.package = JSON.parse(data + '') + } catch (er) { + return this.done() + } + this.packageJsonCache.set(pj, this.package) + this.onPackage(this.package) + } + + onPackage (pkg) { + // all deps are bundled if we got here as a child. + // otherwise, only bundle bundledDeps + // Get a unique-ified array with a short-lived Set + const bdRaw = this.parent + ? Object.keys(pkg.dependencies || {}).concat( + Object.keys(pkg.optionalDependencies || {})) + : pkg.bundleDependencies || pkg.bundledDependencies || [] + + const bd = Array.from(new Set( + Array.isArray(bdRaw) ? bdRaw : Object.keys(bdRaw))) + + if (!bd.length) + return this.done() + + this.bundle = bd + const nm = this.path + '/node_modules' + this.readModules() + } + + readModules () { + readdirNodeModules(this.path + '/node_modules', (er, nm) => + er ? 
this.onReaddir([]) : this.onReaddir(nm)) + } + + onReaddir (nm) { + // keep track of what we have, in case children need it + this.node_modules = nm + + this.bundle.forEach(dep => this.childDep(dep)) + if (this.children === 0) + this.done() + } + + childDep (dep) { + if (this.node_modules.indexOf(dep) !== -1 && !this.seen.has(dep)) { + this.seen.add(dep) + this.child(dep) + } else if (this.parent) { + this.parent.childDep(dep) + } + } + + child (dep) { + const p = this.path + '/node_modules/' + dep + this.children += 1 + const child = new BundleWalker({ + path: p, + parent: this + }) + child.on('done', _ => { + if (--this.children === 0) + this.done() + }) + child.start() + } +} + +class BundleWalkerSync extends BundleWalker { + constructor (opt) { + super(opt) + } + + start () { + super.start() + this.done() + return this + } + + readPackageJson (pj) { + try { + this.onPackageJson(pj, fs.readFileSync(pj)) + } catch (er) {} + return this + } + + readModules () { + try { + this.onReaddir(readdirNodeModulesSync(this.path + '/node_modules')) + } catch (er) { + this.onReaddir([]) + } + } + + child (dep) { + new BundleWalkerSync({ + path: this.path + '/node_modules/' + dep, + parent: this + }).start() + } +} + +const readdirNodeModules = (nm, cb) => { + fs.readdir(nm, (er, set) => { + if (er) + cb(er) + else { + const scopes = set.filter(f => /^@/.test(f)) + if (!scopes.length) + cb(null, set) + else { + const unscoped = set.filter(f => !/^@/.test(f)) + let count = scopes.length + scopes.forEach(scope => { + fs.readdir(nm + '/' + scope, (er, pkgs) => { + if (er || !pkgs.length) + unscoped.push(scope) + else + unscoped.push.apply(unscoped, pkgs.map(p => scope + '/' + p)) + if (--count === 0) + cb(null, unscoped) + }) + }) + } + } + }) +} + +const readdirNodeModulesSync = nm => { + const set = fs.readdirSync(nm) + const unscoped = set.filter(f => !/^@/.test(f)) + const scopes = set.filter(f => /^@/.test(f)).map(scope => { + try { + const pkgs = fs.readdirSync(nm + '/' + scope) + return pkgs.length ? pkgs.map(p => scope + '/' + p) : [scope] + } catch (er) { + return [scope] + } + }).reduce((a, b) => a.concat(b), []) + return unscoped.concat(scopes) +} + +const walk = (options, callback) => { + const p = new Promise((resolve, reject) => { + new BundleWalker(options).on('done', resolve).on('error', reject).start() + }) + return callback ? 
p.then(res => callback(null, res), callback) : p +} + +const walkSync = options => { + return new BundleWalkerSync(options).start().result +} + +module.exports = walk +walk.sync = walkSync +walk.BundleWalker = BundleWalker +walk.BundleWalkerSync = BundleWalkerSync diff --git a/node_modules/fsevents/node_modules/npm-bundled/package.json b/node_modules/fsevents/node_modules/npm-bundled/package.json new file mode 100644 index 00000000..f48cda01 --- /dev/null +++ b/node_modules/fsevents/node_modules/npm-bundled/package.json @@ -0,0 +1,63 @@ +{ + "_args": [ + [ + "npm-bundled@1.0.6", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "npm-bundled@1.0.6", + "_id": "npm-bundled@1.0.6", + "_inBundle": false, + "_integrity": "sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g==", + "_location": "/npm-bundled", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "npm-bundled@1.0.6", + "name": "npm-bundled", + "escapedName": "npm-bundled", + "rawSpec": "1.0.6", + "saveSpec": null, + "fetchSpec": "1.0.6" + }, + "_requiredBy": [ + "/npm-packlist" + ], + "_resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.0.6.tgz", + "_spec": "1.0.6", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/npm/npm-bundled/issues" + }, + "description": "list things in node_modules that are bundledDependencies, or transitive dependencies thereof", + "devDependencies": { + "mkdirp": "^0.5.1", + "mutate-fs": "^1.1.0", + "rimraf": "^2.6.1", + "tap": "^12.0.1" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/npm/npm-bundled#readme", + "license": "ISC", + "main": "index.js", + "name": "npm-bundled", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/npm-bundled.git" + }, + "scripts": { + "postpublish": "git push origin --all; git push origin --tags", + "postversion": "npm publish", + "preversion": "npm test", + "test": "tap test/*.js -J --100" + }, + "version": "1.0.6" +} diff --git a/node_modules/fsevents/node_modules/npm-packlist/LICENSE b/node_modules/fsevents/node_modules/npm-packlist/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/fsevents/node_modules/npm-packlist/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
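The `BundleWalker` and `BundleWalkerSync` classes exported by the npm-bundled index.js above are only mentioned in passing by its README; a minimal, illustrative sketch of driving `BundleWalker` directly, mirroring what the `walk()` helper above does internally (the project path is an arbitrary example and defaults to the current directory):

```js
// Illustrative sketch only: drives the BundleWalker class that npm-bundled
// exports, the same way its walk() helper wires it up internally.
const bundled = require('npm-bundled')

const walker = new bundled.BundleWalker({ path: process.cwd() }) // path defaults to cwd
walker.on('done', list => {
  // `list` is the array of bundled package names collected by the walk
  console.log('bundled deps:', list)
})
walker.on('error', err => console.error(err))
walker.start()

// The synchronous variant exposes its result directly after start():
const syncList = new bundled.BundleWalkerSync({ path: process.cwd() }).start().result
console.log('bundled deps (sync):', syncList)
```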
diff --git a/node_modules/fsevents/node_modules/npm-packlist/README.md b/node_modules/fsevents/node_modules/npm-packlist/README.md new file mode 100644 index 00000000..ead5821e --- /dev/null +++ b/node_modules/fsevents/node_modules/npm-packlist/README.md @@ -0,0 +1,68 @@ +# npm-packlist + +[![Build Status](https://travis-ci.com/npm/npm-packlist.svg?token=hHeDp9pQmz9kvsgRNVHy&branch=master)](https://travis-ci.com/npm/npm-packlist) + +Get a list of the files to add from a folder into an npm package + +These can be handed to [tar](http://npm.im/tar) like so to make an npm +package tarball: + +```js +const packlist = require('npm-packlist') +const tar = require('tar') +const packageDir = '/path/to/package' +const packageTarball = '/path/to/package.tgz' + +packlist({ path: packageDir }) + .then(files => tar.create({ + prefix: 'package/', + cwd: packageDir, + file: packageTarball, + gzip: true + }, files)) + .then(_ => { + // tarball has been created, continue with your day + }) +``` + +This uses the following rules: + +1. If a `package.json` file is found, and it has a `files` list, + then ignore everything that isn't in `files`. Always include the + readme, license, notice, changes, changelog, and history files, if + they exist, and the package.json file itself. +2. If there's no `package.json` file (or it has no `files` list), and + there is a `.npmignore` file, then ignore all the files in the + `.npmignore` file. +3. If there's no `package.json` with a `files` list, and there's no + `.npmignore` file, but there is a `.gitignore` file, then ignore + all the files in the `.gitignore` file. +4. Everything in the root `node_modules` is ignored, unless it's a + bundled dependency. If it IS a bundled dependency, and it's a + symbolic link, then the target of the link is included, not the + symlink itself. +4. Unless they're explicitly included (by being in a `files` list, or + a `!negated` rule in a relevant `.npmignore` or `.gitignore`), + always ignore certain common cruft files: + + 1. .npmignore and .gitignore files (their effect is in the package + already, there's no need to include them in the package) + 2. editor junk like `.*.swp`, `._*` and `.*.orig` files + 3. `.npmrc` files (these may contain private configs) + 4. The `node_modules/.bin` folder + 5. Waf and gyp cruft like `/build/config.gypi` and `.lock-wscript` + 6. Darwin's `.DS_Store` files because wtf are those even + 7. `npm-debug.log` files at the root of a project + + You can explicitly re-include any of these with a `files` list in + `package.json` or a negated ignore file rule. + +## API + +Same API as [ignore-walk](http://npm.im/ignore-walk), just hard-coded +file list and rule sets. + +The `Walker` and `WalkerSync` classes take a `bundled` argument, which +is a list of package names to include from node_modules. When calling +the top-level `packlist()` and `packlist.sync()` functions, this +module calls into `npm-bundled` directly. diff --git a/node_modules/fsevents/node_modules/npm-packlist/index.js b/node_modules/fsevents/node_modules/npm-packlist/index.js new file mode 100644 index 00000000..777b5859 --- /dev/null +++ b/node_modules/fsevents/node_modules/npm-packlist/index.js @@ -0,0 +1,250 @@ +'use strict' + +// Do a two-pass walk, first to get the list of packages that need to be +// bundled, then again to get the actual files and folders. +// Keep a cache of node_modules content and package.json data, so that the +// second walk doesn't have to re-do all the same work. 
+ +const bundleWalk = require('npm-bundled') +const BundleWalker = bundleWalk.BundleWalker +const BundleWalkerSync = bundleWalk.BundleWalkerSync + +const ignoreWalk = require('ignore-walk') +const IgnoreWalker = ignoreWalk.Walker +const IgnoreWalkerSync = ignoreWalk.WalkerSync + +const rootBuiltinRules = Symbol('root-builtin-rules') +const packageNecessaryRules = Symbol('package-necessary-rules') +const path = require('path') + +const defaultRules = [ + '.npmignore', + '.gitignore', + '**/.git', + '**/.svn', + '**/.hg', + '**/CVS', + '**/.git/**', + '**/.svn/**', + '**/.hg/**', + '**/CVS/**', + '/.lock-wscript', + '/.wafpickle-*', + '/build/config.gypi', + 'npm-debug.log', + '**/.npmrc', + '.*.swp', + '**/.DS_Store/**', + '._*', + '**/._*/**', + '*.orig', + '/package-lock.json', + '/yarn.lock', + 'archived-packages/**', + 'core', + '!core/', + '!**/core/', + '*.core', + '*.vgcore', + 'vgcore.*', + 'core.+([0-9])', +] + +// a decorator that applies our custom rules to an ignore walker +const npmWalker = Class => class Walker extends Class { + constructor (opt) { + opt = opt || {} + + // the order in which rules are applied. + opt.ignoreFiles = [ + rootBuiltinRules, + 'package.json', + '.npmignore', + '.gitignore', + packageNecessaryRules + ] + + opt.includeEmpty = false + opt.path = opt.path || process.cwd() + const dirName = path.basename(opt.path) + const parentName = path.basename(path.dirname(opt.path)) + opt.follow = + dirName === 'node_modules' || + (parentName === 'node_modules' && /^@/.test(dirName)) + super(opt) + + // ignore a bunch of things by default at the root level. + // also ignore anything in node_modules, except bundled dependencies + if (!this.parent) { + this.bundled = opt.bundled || [] + this.bundledScopes = Array.from(new Set( + this.bundled.filter(f => /^@/.test(f)) + .map(f => f.split('/')[0]))) + const rules = defaultRules.join('\n') + '\n' + this.packageJsonCache = opt.packageJsonCache || new Map() + super.onReadIgnoreFile(rootBuiltinRules, rules, _=>_) + } else { + this.bundled = [] + this.bundledScopes = [] + this.packageJsonCache = this.parent.packageJsonCache + } + } + + filterEntry (entry, partial) { + // get the partial path from the root of the walk + const p = this.path.substr(this.root.length + 1) + const pkgre = /^node_modules\/(@[^\/]+\/?[^\/]+|[^\/]+)(\/.*)?$/ + const isRoot = !this.parent + const pkg = isRoot && pkgre.test(entry) ? + entry.replace(pkgre, '$1') : null + const rootNM = isRoot && entry === 'node_modules' + const rootPJ = isRoot && entry === 'package.json' + + return ( + // if we're in a bundled package, check with the parent. + /^node_modules($|\/)/i.test(p) ? this.parent.filterEntry( + this.basename + '/' + entry, partial) + + // if package is bundled, all files included + // also include @scope dirs for bundled scoped deps + // they'll be ignored if no files end up in them. + // However, this only matters if we're in the root. + // node_modules folders elsewhere, like lib/node_modules, + // should be included normally unless ignored. + : pkg ? -1 !== this.bundled.indexOf(pkg) || + -1 !== this.bundledScopes.indexOf(pkg) + + // only walk top node_modules if we want to bundle something + : rootNM ? !!this.bundled.length + + // always include package.json at the root. + : rootPJ ? 
true + + // otherwise, follow ignore-walk's logic + : super.filterEntry(entry, partial) + ) + } + + filterEntries () { + if (this.ignoreRules['package.json']) + this.ignoreRules['.gitignore'] = this.ignoreRules['.npmignore'] = null + else if (this.ignoreRules['.npmignore']) + this.ignoreRules['.gitignore'] = null + this.filterEntries = super.filterEntries + super.filterEntries() + } + + addIgnoreFile (file, then) { + const ig = path.resolve(this.path, file) + if (this.packageJsonCache.has(ig)) + this.onPackageJson(ig, this.packageJsonCache.get(ig), then) + else + super.addIgnoreFile(file, then) + } + + onPackageJson (ig, pkg, then) { + this.packageJsonCache.set(ig, pkg) + + // if there's a bin, browser or main, make sure we don't ignore it + // also, don't ignore the package.json itself! + const rules = [ + pkg.browser ? '!' + pkg.browser : '', + pkg.main ? '!' + pkg.main : '', + '!package.json', + '!@(readme|copying|license|licence|notice|changes|changelog|history){,.*[^~$]}' + ] + if (pkg.bin) + if (typeof pkg.bin === "object") + for (const key in pkg.bin) + rules.push('!' + pkg.bin[key]) + else + rules.push('!' + pkg.bin) + + const data = rules.filter(f => f).join('\n') + '\n' + super.onReadIgnoreFile(packageNecessaryRules, data, _=>_) + + if (Array.isArray(pkg.files)) + super.onReadIgnoreFile('package.json', '*\n' + pkg.files.map( + f => '!' + f + '\n!' + f.replace(/\/+$/, '') + '/**' + ).join('\n') + '\n', then) + else + then() + } + + // override parent onstat function to nix all symlinks + onstat (st, entry, file, dir, then) { + if (st.isSymbolicLink()) + then() + else + super.onstat(st, entry, file, dir, then) + } + + onReadIgnoreFile (file, data, then) { + if (file === 'package.json') + try { + const ig = path.resolve(this.path, file) + this.onPackageJson(ig, JSON.parse(data), then) + } catch (er) { + // ignore package.json files that are not json + then() + } + else + super.onReadIgnoreFile(file, data, then) + } + + sort (a, b) { + return sort(a, b) + } +} + +class Walker extends npmWalker(IgnoreWalker) { + walker (entry, then) { + new Walker(this.walkerOpt(entry)).on('done', then).start() + } +} + +class WalkerSync extends npmWalker(IgnoreWalkerSync) { + walker (entry, then) { + new WalkerSync(this.walkerOpt(entry)).start() + then() + } +} + +const walk = (options, callback) => { + options = options || {} + const p = new Promise((resolve, reject) => { + const bw = new BundleWalker(options) + bw.on('done', bundled => { + options.bundled = bundled + options.packageJsonCache = bw.packageJsonCache + new Walker(options).on('done', resolve).on('error', reject).start() + }) + bw.start() + }) + return callback ? p.then(res => callback(null, res), callback) : p +} + +const walkSync = options => { + options = options || {} + const bw = new BundleWalkerSync(options).start() + options.bundled = bw.result + options.packageJsonCache = bw.packageJsonCache + const walker = new WalkerSync(options) + walker.start() + return walker.result +} + +// package.json first, node_modules last, files before folders, alphasort +const sort = (a, b) => + a === 'package.json' ? -1 + : b === 'package.json' ? 1 + : /^node_modules/.test(a) && !/^node_modules/.test(b) ? 1 + : /^node_modules/.test(b) && !/^node_modules/.test(a) ? -1 + : path.dirname(a) === '.' && path.dirname(b) !== '.' ? -1 + : path.dirname(b) === '.' && path.dirname(a) !== '.' ? 
1 + : a.localeCompare(b) + +module.exports = walk +walk.sync = walkSync +walk.Walker = Walker +walk.WalkerSync = WalkerSync diff --git a/node_modules/fsevents/node_modules/npm-packlist/package.json b/node_modules/fsevents/node_modules/npm-packlist/package.json new file mode 100644 index 00000000..1a8b308e --- /dev/null +++ b/node_modules/fsevents/node_modules/npm-packlist/package.json @@ -0,0 +1,69 @@ +{ + "_args": [ + [ + "npm-packlist@1.4.1", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "npm-packlist@1.4.1", + "_id": "npm-packlist@1.4.1", + "_inBundle": false, + "_integrity": "sha512-+TcdO7HJJ8peiiYhvPxsEDhF3PJFGUGRcFsGve3vxvxdcpO2Z4Z7rkosRM0kWj6LfbK/P0gu3dzk5RU1ffvFcw==", + "_location": "/npm-packlist", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "npm-packlist@1.4.1", + "name": "npm-packlist", + "escapedName": "npm-packlist", + "rawSpec": "1.4.1", + "saveSpec": null, + "fetchSpec": "1.4.1" + }, + "_requiredBy": [ + "/node-pre-gyp" + ], + "_resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.1.tgz", + "_spec": "1.4.1", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/npm/npm-packlist/issues" + }, + "dependencies": { + "ignore-walk": "^3.0.1", + "npm-bundled": "^1.0.1" + }, + "description": "Get a list of the files to add from a folder into an npm package", + "devDependencies": { + "mkdirp": "^0.5.1", + "rimraf": "^2.6.1", + "tap": "^12.0.1" + }, + "directories": { + "test": "test" + }, + "files": [ + "index.js" + ], + "homepage": "https://www.npmjs.com/package/npm-packlist", + "license": "ISC", + "main": "index.js", + "name": "npm-packlist", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/npm-packlist.git" + }, + "scripts": { + "postpublish": "git push origin --all; git push origin --tags", + "postversion": "npm publish", + "preversion": "npm test", + "test": "tap test/*.js --100 -J" + }, + "version": "1.4.1" +} diff --git a/node_modules/fsevents/node_modules/npmlog/CHANGELOG.md b/node_modules/fsevents/node_modules/npmlog/CHANGELOG.md new file mode 100644 index 00000000..51e4abc0 --- /dev/null +++ b/node_modules/fsevents/node_modules/npmlog/CHANGELOG.md @@ -0,0 +1,49 @@ +### v4.0.2 + +* Added installation instructions. + +### v4.0.1 + +* Fix bugs where `log.progressEnabled` got out of sync with how `gauge` kept + track of these things resulting in a progressbar that couldn't be disabled. + +### v4.0.0 + +* Allow creating log levels that are an empty string or 0. + +### v3.1.2 + +* Update to `gauge@1.6.0` adding support for default values for template + items. + +### v3.1.1 + +* Update to `gauge@1.5.3` to fix to `1.x` compatibility when it comes to + when a progress bar is enabled. In `1.x` if you didn't have a TTY the + progress bar was never shown. In `2.x` it merely defaults to disabled, + but you can enable it explicitly if you still want progress updates. + +### v3.1.0 + +* Update to `gauge@2.5.2`: + * Updates the `signal-exit` dependency which fixes an incompatibility with + the node profiler. + * Uses externalizes its ansi code generation in `console-control-strings` +* Make the default progress bar include the last line printed, colored as it + would be when printing to a tty. + +### v3.0.0 + +* Switch to `gauge@2.0.0`, for better performance, better look. 
+* Set stderr/stdout blocking if they're tty's, so that we can hide a + progress bar going to stderr and then safely print to stdout. Without + this the two can end up overlapping producing confusing and sometimes + corrupted output. + +### v2.0.0 + +* Make the `error` event non-fatal so that folks can use it as a prefix. + +### v1.0.0 + +* Add progress bar with `gauge@1.1.0` diff --git a/node_modules/fsevents/node_modules/npmlog/LICENSE b/node_modules/fsevents/node_modules/npmlog/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/fsevents/node_modules/npmlog/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/npmlog/README.md b/node_modules/fsevents/node_modules/npmlog/README.md new file mode 100644 index 00000000..268a4af4 --- /dev/null +++ b/node_modules/fsevents/node_modules/npmlog/README.md @@ -0,0 +1,216 @@ +# npmlog + +The logger util that npm uses. + +This logger is very basic. It does the logging for npm. It supports +custom levels and colored output. + +By default, logs are written to stderr. If you want to send log messages +to outputs other than streams, then you can change the `log.stream` +member, or you can just listen to the events that it emits, and do +whatever you want with them. + +# Installation + +```console +npm install npmlog --save +``` + +# Basic Usage + +```javascript +var log = require('npmlog') + +// additional stuff ---------------------------+ +// message ----------+ | +// prefix ----+ | | +// level -+ | | | +// v v v v + log.info('fyi', 'I have a kitty cat: %j', myKittyCat) +``` + +## log.level + +* {String} + +The level to display logs at. Any logs at or above this level will be +displayed. The special level `silent` will prevent anything from being +displayed ever. + +## log.record + +* {Array} + +An array of all the log messages that have been entered. + +## log.maxRecordSize + +* {Number} + +The maximum number of records to keep. If log.record gets bigger than +10% over this value, then it is sliced down to 90% of this value. + +The reason for the 10% window is so that it doesn't have to resize a +large array on every log entry. + +## log.prefixStyle + +* {Object} + +A style object that specifies how prefixes are styled. (See below) + +## log.headingStyle + +* {Object} + +A style object that specifies how the heading is styled. (See below) + +## log.heading + +* {String} Default: "" + +If set, a heading that is printed at the start of every line. + +## log.stream + +* {Stream} Default: `process.stderr` + +The stream where output is written. + +## log.enableColor() + +Force colors to be used on all messages, regardless of the output +stream. + +## log.disableColor() + +Disable colors on all messages. 
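A minimal sketch of how the properties documented above combine; the heading, level, and message values are arbitrary examples:

```js
var log = require('npmlog')

// route output somewhere other than the default stderr if desired
log.stream = process.stdout

log.heading = 'myapp'   // printed at the start of every line
log.level = 'warn'      // hide anything below the warn level
log.enableColor()       // force colors even when not writing to a TTY

log.info('init', 'this line is filtered out by the warn level')
log.warn('config', 'missing %j, using defaults', '.myapprc')

// every message, shown or not, is kept in log.record (up to maxRecordSize)
console.log(log.record.length, 'messages recorded')
```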
+ +## log.enableProgress() + +Enable the display of log activity spinner and progress bar + +## log.disableProgress() + +Disable the display of a progress bar + +## log.enableUnicode() + +Force the unicode theme to be used for the progress bar. + +## log.disableUnicode() + +Disable the use of unicode in the progress bar. + +## log.setGaugeTemplate(template) + +Set a template for outputting the progress bar. See the [gauge documentation] for details. + +[gauge documentation]: https://npmjs.com/package/gauge + +## log.setGaugeThemeset(themes) + +Select a themeset to pick themes from for the progress bar. See the [gauge documentation] for details. + +## log.pause() + +Stop emitting messages to the stream, but do not drop them. + +## log.resume() + +Emit all buffered messages that were written while paused. + +## log.log(level, prefix, message, ...) + +* `level` {String} The level to emit the message at +* `prefix` {String} A string prefix. Set to "" to skip. +* `message...` Arguments to `util.format` + +Emit a log message at the specified level. + +## log\[level](prefix, message, ...) + +For example, + +* log.silly(prefix, message, ...) +* log.verbose(prefix, message, ...) +* log.info(prefix, message, ...) +* log.http(prefix, message, ...) +* log.warn(prefix, message, ...) +* log.error(prefix, message, ...) + +Like `log.log(level, prefix, message, ...)`. In this way, each level is +given a shorthand, so you can do `log.info(prefix, message)`. + +## log.addLevel(level, n, style, disp) + +* `level` {String} Level indicator +* `n` {Number} The numeric level +* `style` {Object} Object with fg, bg, inverse, etc. +* `disp` {String} Optional replacement for `level` in the output. + +Sets up a new level with a shorthand function and so forth. + +Note that if the number is `Infinity`, then setting the level to that +will cause all log messages to be suppressed. If the number is +`-Infinity`, then the only way to show it is to enable all log messages. + +## log.newItem(name, todo, weight) + +* `name` {String} Optional; progress item name. +* `todo` {Number} Optional; total amount of work to be done. Default 0. +* `weight` {Number} Optional; the weight of this item relative to others. Default 1. + +This adds a new `are-we-there-yet` item tracker to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `Tracker` object. + +## log.newStream(name, todo, weight) + +This adds a new `are-we-there-yet` stream tracker to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `TrackerStream` object. + +## log.newGroup(name, weight) + +This adds a new `are-we-there-yet` tracker group to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `TrackerGroup` object. + +# Events + +Events are all emitted with the message object. + +* `log` Emitted for all messages +* `log.` Emitted for all messages with the `` level. +* `` Messages with prefixes also emit their prefix as an event. + +# Style Objects + +Style objects can have the following fields: + +* `fg` {String} Color for the foreground text +* `bg` {String} Color for the background +* `bold`, `inverse`, `underline` {Boolean} Set the associated property +* `bell` {Boolean} Make a noise (This is pretty annoying, probably.) + +# Message Objects + +Every log event is emitted with a message object, and the `log.record` +list contains all of them that have been created. 
They have the +following fields: + +* `id` {Number} +* `level` {String} +* `prefix` {String} +* `message` {String} Result of `util.format()` +* `messageRaw` {Array} Arguments to `util.format()` + +# Blocking TTYs + +We use [`set-blocking`](https://npmjs.com/package/set-blocking) to set +stderr and stdout blocking if they are tty's and have the setBlocking call. +This is a work around for an issue in early versions of Node.js 6.x, which +made stderr and stdout non-blocking on OSX. (They are always blocking +Windows and were never blocking on Linux.) `npmlog` needs them to be blocking +so that it can allow output to stdout and stderr to be interlaced. diff --git a/node_modules/fsevents/node_modules/npmlog/log.js b/node_modules/fsevents/node_modules/npmlog/log.js new file mode 100644 index 00000000..341f3313 --- /dev/null +++ b/node_modules/fsevents/node_modules/npmlog/log.js @@ -0,0 +1,309 @@ +'use strict' +var Progress = require('are-we-there-yet') +var Gauge = require('gauge') +var EE = require('events').EventEmitter +var log = exports = module.exports = new EE() +var util = require('util') + +var setBlocking = require('set-blocking') +var consoleControl = require('console-control-strings') + +setBlocking(true) +var stream = process.stderr +Object.defineProperty(log, 'stream', { + set: function (newStream) { + stream = newStream + if (this.gauge) this.gauge.setWriteTo(stream, stream) + }, + get: function () { + return stream + } +}) + +// by default, decide based on tty-ness. +var colorEnabled +log.useColor = function () { + return colorEnabled != null ? colorEnabled : stream.isTTY +} + +log.enableColor = function () { + colorEnabled = true + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} +log.disableColor = function () { + colorEnabled = false + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} + +// default level +log.level = 'info' + +log.gauge = new Gauge(stream, { + enabled: false, // no progress bars unless asked + theme: {hasColor: log.useColor()}, + template: [ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', default: ''}, + ':', + {type: 'logline', kerning: 1, default: ''} + ] +}) + +log.tracker = new Progress.TrackerGroup() + +// we track this separately as we may need to temporarily disable the +// display of the status bar for our own loggy purposes. 
+log.progressEnabled = log.gauge.isEnabled() + +var unicodeEnabled + +log.enableUnicode = function () { + unicodeEnabled = true + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.disableUnicode = function () { + unicodeEnabled = false + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.setGaugeThemeset = function (themes) { + this.gauge.setThemeset(themes) +} + +log.setGaugeTemplate = function (template) { + this.gauge.setTemplate(template) +} + +log.enableProgress = function () { + if (this.progressEnabled) return + this.progressEnabled = true + this.tracker.on('change', this.showProgress) + if (this._pause) return + this.gauge.enable() +} + +log.disableProgress = function () { + if (!this.progressEnabled) return + this.progressEnabled = false + this.tracker.removeListener('change', this.showProgress) + this.gauge.disable() +} + +var trackerConstructors = ['newGroup', 'newItem', 'newStream'] + +var mixinLog = function (tracker) { + // mixin the public methods from log into the tracker + // (except: conflicts and one's we handle specially) + Object.keys(log).forEach(function (P) { + if (P[0] === '_') return + if (trackerConstructors.filter(function (C) { return C === P }).length) return + if (tracker[P]) return + if (typeof log[P] !== 'function') return + var func = log[P] + tracker[P] = function () { + return func.apply(log, arguments) + } + }) + // if the new tracker is a group, make sure any subtrackers get + // mixed in too + if (tracker instanceof Progress.TrackerGroup) { + trackerConstructors.forEach(function (C) { + var func = tracker[C] + tracker[C] = function () { return mixinLog(func.apply(tracker, arguments)) } + }) + } + return tracker +} + +// Add tracker constructors to the top level log object +trackerConstructors.forEach(function (C) { + log[C] = function () { return mixinLog(this.tracker[C].apply(this.tracker, arguments)) } +}) + +log.clearProgress = function (cb) { + if (!this.progressEnabled) return cb && process.nextTick(cb) + this.gauge.hide(cb) +} + +log.showProgress = function (name, completed) { + if (!this.progressEnabled) return + var values = {} + if (name) values.section = name + var last = log.record[log.record.length - 1] + if (last) { + values.subsection = last.prefix + var disp = log.disp[last.level] || last.level + var logline = this._format(disp, log.style[last.level]) + if (last.prefix) logline += ' ' + this._format(last.prefix, this.prefixStyle) + logline += ' ' + last.message.split(/\r?\n/)[0] + values.logline = logline + } + values.completed = completed || this.tracker.completed() + this.gauge.show(values) +}.bind(log) // bind for use in tracker's on-change listener + +// temporarily stop emitting, but don't drop +log.pause = function () { + this._paused = true + if (this.progressEnabled) this.gauge.disable() +} + +log.resume = function () { + if (!this._paused) return + this._paused = false + + var b = this._buffer + this._buffer = [] + b.forEach(function (m) { + this.emitLog(m) + }, this) + if (this.progressEnabled) this.gauge.enable() +} + +log._buffer = [] + +var id = 0 +log.record = [] +log.maxRecordSize = 10000 +log.log = function (lvl, prefix, message) { + var l = this.levels[lvl] + if (l === undefined) { + return this.emit('error', new Error(util.format( + 'Undefined log level: %j', lvl))) + } + + var a = new Array(arguments.length - 2) + var stack = null + for (var i = 2; i < arguments.length; i++) { + var arg = a[i - 2] = arguments[i] + + // resolve stack traces to a 
plain string. + if (typeof arg === 'object' && arg && + (arg instanceof Error) && arg.stack) { + + Object.defineProperty(arg, 'stack', { + value: stack = arg.stack + '', + enumerable: true, + writable: true + }) + } + } + if (stack) a.unshift(stack + '\n') + message = util.format.apply(util, a) + + var m = { id: id++, + level: lvl, + prefix: String(prefix || ''), + message: message, + messageRaw: a } + + this.emit('log', m) + this.emit('log.' + lvl, m) + if (m.prefix) this.emit(m.prefix, m) + + this.record.push(m) + var mrs = this.maxRecordSize + var n = this.record.length - mrs + if (n > mrs / 10) { + var newSize = Math.floor(mrs * 0.9) + this.record = this.record.slice(-1 * newSize) + } + + this.emitLog(m) +}.bind(log) + +log.emitLog = function (m) { + if (this._paused) { + this._buffer.push(m) + return + } + if (this.progressEnabled) this.gauge.pulse(m.prefix) + var l = this.levels[m.level] + if (l === undefined) return + if (l < this.levels[this.level]) return + if (l > 0 && !isFinite(l)) return + + // If 'disp' is null or undefined, use the lvl as a default + // Allows: '', 0 as valid disp + var disp = log.disp[m.level] != null ? log.disp[m.level] : m.level + this.clearProgress() + m.message.split(/\r?\n/).forEach(function (line) { + if (this.heading) { + this.write(this.heading, this.headingStyle) + this.write(' ') + } + this.write(disp, log.style[m.level]) + var p = m.prefix || '' + if (p) this.write(' ') + this.write(p, this.prefixStyle) + this.write(' ' + line + '\n') + }, this) + this.showProgress() +} + +log._format = function (msg, style) { + if (!stream) return + + var output = '' + if (this.useColor()) { + style = style || {} + var settings = [] + if (style.fg) settings.push(style.fg) + if (style.bg) settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) + if (style.bold) settings.push('bold') + if (style.underline) settings.push('underline') + if (style.inverse) settings.push('inverse') + if (settings.length) output += consoleControl.color(settings) + if (style.beep) output += consoleControl.beep() + } + output += msg + if (this.useColor()) { + output += consoleControl.color('reset') + } + return output +} + +log.write = function (msg, style) { + if (!stream) return + + stream.write(this._format(msg, style)) +} + +log.addLevel = function (lvl, n, style, disp) { + // If 'disp' is null or undefined, use the lvl as a default + if (disp == null) disp = lvl + this.levels[lvl] = n + this.style[lvl] = style + if (!this[lvl]) { + this[lvl] = function () { + var a = new Array(arguments.length + 1) + a[0] = lvl + for (var i = 0; i < arguments.length; i++) { + a[i + 1] = arguments[i] + } + return this.log.apply(this, a) + }.bind(this) + } + this.disp[lvl] = disp +} + +log.prefixStyle = { fg: 'magenta' } +log.headingStyle = { fg: 'white', bg: 'black' } + +log.style = {} +log.levels = {} +log.disp = {} +log.addLevel('silly', -Infinity, { inverse: true }, 'sill') +log.addLevel('verbose', 1000, { fg: 'blue', bg: 'black' }, 'verb') +log.addLevel('info', 2000, { fg: 'green' }) +log.addLevel('timing', 2500, { fg: 'green', bg: 'black' }) +log.addLevel('http', 3000, { fg: 'green', bg: 'black' }) +log.addLevel('notice', 3500, { fg: 'blue', bg: 'black' }) +log.addLevel('warn', 4000, { fg: 'black', bg: 'yellow' }, 'WARN') +log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!') +log.addLevel('silent', Infinity) + +// allow 'error' prefix +log.on('error', function () {}) diff --git a/node_modules/fsevents/node_modules/npmlog/package.json 
b/node_modules/fsevents/node_modules/npmlog/package.json new file mode 100644 index 00000000..544b0c65 --- /dev/null +++ b/node_modules/fsevents/node_modules/npmlog/package.json @@ -0,0 +1,64 @@ +{ + "_args": [ + [ + "npmlog@4.1.2", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "npmlog@4.1.2", + "_id": "npmlog@4.1.2", + "_inBundle": false, + "_integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "_location": "/npmlog", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "npmlog@4.1.2", + "name": "npmlog", + "escapedName": "npmlog", + "rawSpec": "4.1.2", + "saveSpec": null, + "fetchSpec": "4.1.2" + }, + "_requiredBy": [ + "/node-pre-gyp" + ], + "_resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "_spec": "4.1.2", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/npm/npmlog/issues" + }, + "dependencies": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + }, + "description": "logger for npm", + "devDependencies": { + "standard": "~7.1.2", + "tap": "~5.7.3" + }, + "files": [ + "log.js" + ], + "homepage": "https://github.com/npm/npmlog#readme", + "license": "ISC", + "main": "log.js", + "name": "npmlog", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/npmlog.git" + }, + "scripts": { + "test": "standard && tap test/*.js" + }, + "version": "4.1.2" +} diff --git a/node_modules/fsevents/node_modules/number-is-nan/index.js b/node_modules/fsevents/node_modules/number-is-nan/index.js new file mode 100644 index 00000000..79be4b9c --- /dev/null +++ b/node_modules/fsevents/node_modules/number-is-nan/index.js @@ -0,0 +1,4 @@ +'use strict'; +module.exports = Number.isNaN || function (x) { + return x !== x; +}; diff --git a/node_modules/fsevents/node_modules/number-is-nan/license b/node_modules/fsevents/node_modules/number-is-nan/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/fsevents/node_modules/number-is-nan/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
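Note: this patch vendors npmlog's `log.js` (above) without its readme, so here is a minimal, illustrative usage sketch of the API that file defines (levels registered via `addLevel`, progress wired through the gauge/tracker mixins). It is not a file in this patch; the heading text, item names, and work counts are assumptions for illustration only.

```js
// Illustrative consumer of the npmlog API from log.js above; not part of this patch.
var log = require('npmlog')

log.heading = 'myapp'                 // assumed heading, written via headingStyle before each line
log.level = 'verbose'                 // emitLog drops records below this level

log.enableProgress()                  // wires the gauge to the tracker group's 'change' events
var item = log.newItem('fetch', 2)    // mixinLog also exposes the log methods on the tracker

log.verbose('config', 'loaded %d entries', 42)   // util.format-style interpolation, as in log.log
item.info('fetch', 'first chunk done')           // proxied back to the top-level log
item.completeWork(1)                             // advances the gauge via showProgress

item.finish()
log.disableProgress()
```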
diff --git a/node_modules/fsevents/node_modules/number-is-nan/package.json b/node_modules/fsevents/node_modules/number-is-nan/package.json new file mode 100644 index 00000000..53336826 --- /dev/null +++ b/node_modules/fsevents/node_modules/number-is-nan/package.json @@ -0,0 +1,70 @@ +{ + "_args": [ + [ + "number-is-nan@1.0.1", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "number-is-nan@1.0.1", + "_id": "number-is-nan@1.0.1", + "_inBundle": false, + "_integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "_location": "/number-is-nan", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "number-is-nan@1.0.1", + "name": "number-is-nan", + "escapedName": "number-is-nan", + "rawSpec": "1.0.1", + "saveSpec": null, + "fetchSpec": "1.0.1" + }, + "_requiredBy": [ + "/is-fullwidth-code-point" + ], + "_resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "_spec": "1.0.1", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/number-is-nan/issues" + }, + "description": "ES2015 Number.isNaN() ponyfill", + "devDependencies": { + "ava": "*" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/sindresorhus/number-is-nan#readme", + "keywords": [ + "es2015", + "ecmascript", + "ponyfill", + "polyfill", + "shim", + "number", + "is", + "nan", + "not" + ], + "license": "MIT", + "name": "number-is-nan", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/number-is-nan.git" + }, + "scripts": { + "test": "ava" + }, + "version": "1.0.1" +} diff --git a/node_modules/fsevents/node_modules/number-is-nan/readme.md b/node_modules/fsevents/node_modules/number-is-nan/readme.md new file mode 100644 index 00000000..24635087 --- /dev/null +++ b/node_modules/fsevents/node_modules/number-is-nan/readme.md @@ -0,0 +1,28 @@ +# number-is-nan [![Build Status](https://travis-ci.org/sindresorhus/number-is-nan.svg?branch=master)](https://travis-ci.org/sindresorhus/number-is-nan) + +> ES2015 [`Number.isNaN()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN) [ponyfill](https://ponyfill.com) + + +## Install + +``` +$ npm install --save number-is-nan +``` + + +## Usage + +```js +var numberIsNan = require('number-is-nan'); + +numberIsNan(NaN); +//=> true + +numberIsNan('unicorn'); +//=> false +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/object-assign/index.js b/node_modules/fsevents/node_modules/object-assign/index.js new file mode 100644 index 00000000..0930cf88 --- /dev/null +++ b/node_modules/fsevents/node_modules/object-assign/index.js @@ -0,0 +1,90 @@ +/* +object-assign +(c) Sindre Sorhus +@license MIT +*/ + +'use strict'; +/* eslint-disable no-unused-vars */ +var getOwnPropertySymbols = Object.getOwnPropertySymbols; +var hasOwnProperty = Object.prototype.hasOwnProperty; +var propIsEnumerable = Object.prototype.propertyIsEnumerable; + +function toObject(val) { + if (val === null || val === undefined) { + throw new TypeError('Object.assign cannot be called with null or undefined'); + } + + return Object(val); +} + +function shouldUseNative() { + try { + if (!Object.assign) { + return false; + } + + // Detect buggy property enumeration order in older V8 versions. 
+ + // https://bugs.chromium.org/p/v8/issues/detail?id=4118 + var test1 = new String('abc'); // eslint-disable-line no-new-wrappers + test1[5] = 'de'; + if (Object.getOwnPropertyNames(test1)[0] === '5') { + return false; + } + + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test2 = {}; + for (var i = 0; i < 10; i++) { + test2['_' + String.fromCharCode(i)] = i; + } + var order2 = Object.getOwnPropertyNames(test2).map(function (n) { + return test2[n]; + }); + if (order2.join('') !== '0123456789') { + return false; + } + + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test3 = {}; + 'abcdefghijklmnopqrst'.split('').forEach(function (letter) { + test3[letter] = letter; + }); + if (Object.keys(Object.assign({}, test3)).join('') !== + 'abcdefghijklmnopqrst') { + return false; + } + + return true; + } catch (err) { + // We don't expect any of the above to throw, but better to be safe. + return false; + } +} + +module.exports = shouldUseNative() ? Object.assign : function (target, source) { + var from; + var to = toObject(target); + var symbols; + + for (var s = 1; s < arguments.length; s++) { + from = Object(arguments[s]); + + for (var key in from) { + if (hasOwnProperty.call(from, key)) { + to[key] = from[key]; + } + } + + if (getOwnPropertySymbols) { + symbols = getOwnPropertySymbols(from); + for (var i = 0; i < symbols.length; i++) { + if (propIsEnumerable.call(from, symbols[i])) { + to[symbols[i]] = from[symbols[i]]; + } + } + } + } + + return to; +}; diff --git a/node_modules/fsevents/node_modules/object-assign/license b/node_modules/fsevents/node_modules/object-assign/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/fsevents/node_modules/object-assign/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/object-assign/package.json b/node_modules/fsevents/node_modules/object-assign/package.json new file mode 100644 index 00000000..7e828d7b --- /dev/null +++ b/node_modules/fsevents/node_modules/object-assign/package.json @@ -0,0 +1,77 @@ +{ + "_args": [ + [ + "object-assign@4.1.1", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "object-assign@4.1.1", + "_id": "object-assign@4.1.1", + "_inBundle": false, + "_integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "_location": "/object-assign", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "object-assign@4.1.1", + "name": "object-assign", + "escapedName": "object-assign", + "rawSpec": "4.1.1", + "saveSpec": null, + "fetchSpec": "4.1.1" + }, + "_requiredBy": [ + "/gauge" + ], + "_resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "_spec": "4.1.1", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/object-assign/issues" + }, + "description": "ES2015 `Object.assign()` ponyfill", + "devDependencies": { + "ava": "^0.16.0", + "lodash": "^4.16.4", + "matcha": "^0.7.0", + "xo": "^0.16.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/sindresorhus/object-assign#readme", + "keywords": [ + "object", + "assign", + "extend", + "properties", + "es2015", + "ecmascript", + "harmony", + "ponyfill", + "prollyfill", + "polyfill", + "shim", + "browser" + ], + "license": "MIT", + "name": "object-assign", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/object-assign.git" + }, + "scripts": { + "bench": "matcha bench.js", + "test": "xo && ava" + }, + "version": "4.1.1" +} diff --git a/node_modules/fsevents/node_modules/object-assign/readme.md b/node_modules/fsevents/node_modules/object-assign/readme.md new file mode 100644 index 00000000..1be09d35 --- /dev/null +++ b/node_modules/fsevents/node_modules/object-assign/readme.md @@ -0,0 +1,61 @@ +# object-assign [![Build Status](https://travis-ci.org/sindresorhus/object-assign.svg?branch=master)](https://travis-ci.org/sindresorhus/object-assign) + +> ES2015 [`Object.assign()`](http://www.2ality.com/2014/01/object-assign.html) [ponyfill](https://ponyfill.com) + + +## Use the built-in + +Node.js 4 and up, as well as every evergreen browser (Chrome, Edge, Firefox, Opera, Safari), +support `Object.assign()` :tada:. If you target only those environments, then by all +means, use `Object.assign()` instead of this package. + + +## Install + +``` +$ npm install --save object-assign +``` + + +## Usage + +```js +const objectAssign = require('object-assign'); + +objectAssign({foo: 0}, {bar: 1}); +//=> {foo: 0, bar: 1} + +// multiple sources +objectAssign({foo: 0}, {bar: 1}, {baz: 2}); +//=> {foo: 0, bar: 1, baz: 2} + +// overwrites equal keys +objectAssign({foo: 0}, {foo: 1}, {foo: 2}); +//=> {foo: 2} + +// ignores null and undefined sources +objectAssign({foo: 0}, null, {bar: 1}, undefined); +//=> {foo: 0, bar: 1} +``` + + +## API + +### objectAssign(target, [source, ...]) + +Assigns enumerable own properties of `source` objects to the `target` object and returns the `target` object. Additional `source` objects will overwrite previous ones. 
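+A small supplementary sketch (added for clarity, not part of the upstream readme): it illustrates the behaviour described above, namely that the `target` object passed in is mutated in place and that the very same object is returned.
+
+```js
+const objectAssign = require('object-assign');
+
+const target = {foo: 0};
+const returned = objectAssign(target, {bar: 1});
+
+console.log(target);
+//=> {foo: 0, bar: 1}  (the target itself is mutated)
+
+console.log(returned === target);
+//=> true  (the same target object is returned)
+```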
+ + +## Resources + +- [ES2015 spec - Object.assign](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-object.assign) + + +## Related + +- [deep-assign](https://github.com/sindresorhus/deep-assign) - Recursive `Object.assign()` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/once/LICENSE b/node_modules/fsevents/node_modules/once/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/fsevents/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/once/README.md b/node_modules/fsevents/node_modules/once/README.md new file mode 100644 index 00000000..1f1ffca9 --- /dev/null +++ b/node_modules/fsevents/node_modules/once/README.md @@ -0,0 +1,79 @@ +# once + +Only call a function once. + +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. + +To check whether you function has been called, use `fn.called`. Once the +function is called for the first time the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. + +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` + +## `once.strict(func)` + +Throw an error if the function is called twice. + +Some functions are expected to be called only once. Using `once` for them would +potentially hide logical errors. 
+ +In the example below, the `greet` function has to call the callback only once: + +```javascript +function greet (name, cb) { + // return is missing from the if statement + // when no name is passed, the callback is called twice + if (!name) cb('Hello anonymous') + cb('Hello ' + name) +} + +function log (msg) { + console.log(msg) +} + +// this will print 'Hello anonymous' but the logical error will be missed +greet(null, once(msg)) + +// once.strict will print 'Hello anonymous' and throw an error when the callback will be called the second time +greet(null, once.strict(msg)) +``` diff --git a/node_modules/fsevents/node_modules/once/once.js b/node_modules/fsevents/node_modules/once/once.js new file mode 100644 index 00000000..23540673 --- /dev/null +++ b/node_modules/fsevents/node_modules/once/once.js @@ -0,0 +1,42 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} diff --git a/node_modules/fsevents/node_modules/once/package.json b/node_modules/fsevents/node_modules/once/package.json new file mode 100644 index 00000000..ad732116 --- /dev/null +++ b/node_modules/fsevents/node_modules/once/package.json @@ -0,0 +1,70 @@ +{ + "_args": [ + [ + "once@1.4.0", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "once@1.4.0", + "_id": "once@1.4.0", + "_inBundle": false, + "_integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "_location": "/once", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "once@1.4.0", + "name": "once", + "escapedName": "once", + "rawSpec": "1.4.0", + "saveSpec": null, + "fetchSpec": "1.4.0" + }, + "_requiredBy": [ + "/glob", + "/inflight" + ], + "_resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "_spec": "1.4.0", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/once/issues" + }, + "dependencies": { + "wrappy": "1" + }, + "description": "Run a function exactly one time", + "devDependencies": { + "tap": "^7.0.1" + }, + "directories": { + "test": "test" + }, + "files": [ + "once.js" + ], + "homepage": "https://github.com/isaacs/once#readme", + "keywords": [ + "once", + "function", + "one", + "single" + ], + "license": "ISC", + "main": "once.js", + "name": "once", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "version": "1.4.0" +} diff --git a/node_modules/fsevents/node_modules/os-homedir/index.js b/node_modules/fsevents/node_modules/os-homedir/index.js new file mode 100644 index 00000000..33066166 --- /dev/null +++ b/node_modules/fsevents/node_modules/os-homedir/index.js @@ -0,0 +1,24 @@ +'use strict'; +var os = require('os'); + +function homedir() { + var env = process.env; + var home = env.HOME; + var user = env.LOGNAME || env.USER || env.LNAME || env.USERNAME; + + if (process.platform === 'win32') { + return env.USERPROFILE || env.HOMEDRIVE + env.HOMEPATH || home || null; + } + + if (process.platform === 'darwin') { + return home || (user ? '/Users/' + user : null); + } + + if (process.platform === 'linux') { + return home || (process.getuid() === 0 ? '/root' : (user ? '/home/' + user : null)); + } + + return home || null; +} + +module.exports = typeof os.homedir === 'function' ? os.homedir : homedir; diff --git a/node_modules/fsevents/node_modules/os-homedir/license b/node_modules/fsevents/node_modules/os-homedir/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/fsevents/node_modules/os-homedir/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/os-homedir/package.json b/node_modules/fsevents/node_modules/os-homedir/package.json new file mode 100644 index 00000000..131e23f8 --- /dev/null +++ b/node_modules/fsevents/node_modules/os-homedir/package.json @@ -0,0 +1,76 @@ +{ + "_args": [ + [ + "os-homedir@1.0.2", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "os-homedir@1.0.2", + "_id": "os-homedir@1.0.2", + "_inBundle": false, + "_integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", + "_location": "/os-homedir", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "os-homedir@1.0.2", + "name": "os-homedir", + "escapedName": "os-homedir", + "rawSpec": "1.0.2", + "saveSpec": null, + "fetchSpec": "1.0.2" + }, + "_requiredBy": [ + "/osenv" + ], + "_resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "_spec": "1.0.2", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/os-homedir/issues" + }, + "description": "Node.js 4 `os.homedir()` ponyfill", + "devDependencies": { + "ava": "*", + "path-exists": "^2.0.0", + "xo": "^0.16.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/sindresorhus/os-homedir#readme", + "keywords": [ + "builtin", + "core", + "ponyfill", + "polyfill", + "shim", + "os", + "homedir", + "home", + "dir", + "directory", + "folder", + "user", + "path" + ], + "license": "MIT", + "name": "os-homedir", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/os-homedir.git" + }, + "scripts": { + "test": "xo && ava" + }, + "version": "1.0.2" +} diff --git a/node_modules/fsevents/node_modules/os-homedir/readme.md b/node_modules/fsevents/node_modules/os-homedir/readme.md new file mode 100644 index 00000000..856ae615 --- /dev/null +++ b/node_modules/fsevents/node_modules/os-homedir/readme.md @@ -0,0 +1,31 @@ +# os-homedir [![Build Status](https://travis-ci.org/sindresorhus/os-homedir.svg?branch=master)](https://travis-ci.org/sindresorhus/os-homedir) + +> Node.js 4 [`os.homedir()`](https://nodejs.org/api/os.html#os_os_homedir) [ponyfill](https://ponyfill.com) + + +## Install + +``` +$ npm install --save os-homedir +``` + + +## Usage + +```js +const osHomedir = require('os-homedir'); + +console.log(osHomedir()); +//=> '/Users/sindresorhus' +``` + + +## Related + +- [user-home](https://github.com/sindresorhus/user-home) - Same as this module but caches the result +- [home-or-tmp](https://github.com/sindresorhus/home-or-tmp) - Get the user home directory with fallback to the system temp directory + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/os-tmpdir/index.js b/node_modules/fsevents/node_modules/os-tmpdir/index.js new file mode 100644 index 00000000..2077b1ce --- /dev/null +++ b/node_modules/fsevents/node_modules/os-tmpdir/index.js @@ -0,0 +1,25 @@ +'use strict'; +var isWindows = process.platform === 'win32'; +var trailingSlashRe = isWindows ? 
/[^:]\\$/ : /.\/$/; + +// https://github.com/nodejs/node/blob/3e7a14381497a3b73dda68d05b5130563cdab420/lib/os.js#L25-L43 +module.exports = function () { + var path; + + if (isWindows) { + path = process.env.TEMP || + process.env.TMP || + (process.env.SystemRoot || process.env.windir) + '\\temp'; + } else { + path = process.env.TMPDIR || + process.env.TMP || + process.env.TEMP || + '/tmp'; + } + + if (trailingSlashRe.test(path)) { + path = path.slice(0, -1); + } + + return path; +}; diff --git a/node_modules/fsevents/node_modules/os-tmpdir/license b/node_modules/fsevents/node_modules/os-tmpdir/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/fsevents/node_modules/os-tmpdir/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/os-tmpdir/package.json b/node_modules/fsevents/node_modules/os-tmpdir/package.json new file mode 100644 index 00000000..ffb78997 --- /dev/null +++ b/node_modules/fsevents/node_modules/os-tmpdir/package.json @@ -0,0 +1,76 @@ +{ + "_args": [ + [ + "os-tmpdir@1.0.2", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "os-tmpdir@1.0.2", + "_id": "os-tmpdir@1.0.2", + "_inBundle": false, + "_integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "_location": "/os-tmpdir", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "os-tmpdir@1.0.2", + "name": "os-tmpdir", + "escapedName": "os-tmpdir", + "rawSpec": "1.0.2", + "saveSpec": null, + "fetchSpec": "1.0.2" + }, + "_requiredBy": [ + "/osenv" + ], + "_resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "_spec": "1.0.2", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/os-tmpdir/issues" + }, + "description": "Node.js os.tmpdir() ponyfill", + "devDependencies": { + "ava": "*", + "xo": "^0.16.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/sindresorhus/os-tmpdir#readme", + "keywords": [ + "built-in", + "core", + "ponyfill", + "polyfill", + "shim", + "os", + "tmpdir", + "tempdir", + "tmp", + "temp", + "dir", + "directory", + "env", + "environment" + ], + "license": "MIT", + "name": "os-tmpdir", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/os-tmpdir.git" + }, + "scripts": { + "test": "xo && ava" + }, + "version": "1.0.2" +} diff --git a/node_modules/fsevents/node_modules/os-tmpdir/readme.md b/node_modules/fsevents/node_modules/os-tmpdir/readme.md new file mode 100644 index 00000000..c09f7ed8 --- /dev/null +++ b/node_modules/fsevents/node_modules/os-tmpdir/readme.md @@ -0,0 +1,32 @@ +# os-tmpdir [![Build Status](https://travis-ci.org/sindresorhus/os-tmpdir.svg?branch=master)](https://travis-ci.org/sindresorhus/os-tmpdir) + +> Node.js [`os.tmpdir()`](https://nodejs.org/api/os.html#os_os_tmpdir) [ponyfill](https://ponyfill.com) + +Use this instead of `require('os').tmpdir()` to get a consistent behavior on different Node.js versions (even 0.8). + + +## Install + +``` +$ npm install --save os-tmpdir +``` + + +## Usage + +```js +const osTmpdir = require('os-tmpdir'); + +osTmpdir(); +//=> '/var/folders/m3/5574nnhn0yj488ccryqr7tc80000gn/T' +``` + + +## API + +See the [`os.tmpdir()` docs](https://nodejs.org/api/os.html#os_os_tmpdir). + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/osenv/LICENSE b/node_modules/fsevents/node_modules/osenv/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/fsevents/node_modules/osenv/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/osenv/README.md b/node_modules/fsevents/node_modules/osenv/README.md new file mode 100644 index 00000000..08fd9002 --- /dev/null +++ b/node_modules/fsevents/node_modules/osenv/README.md @@ -0,0 +1,63 @@ +# osenv + +Look up environment settings specific to different operating systems. + +## Usage + +```javascript +var osenv = require('osenv') +var path = osenv.path() +var user = osenv.user() +// etc. + +// Some things are not reliably in the env, and have a fallback command: +var h = osenv.hostname(function (er, hostname) { + h = hostname +}) +// This will still cause it to be memoized, so calling osenv.hostname() +// is now an immediate operation. + +// You can always send a cb, which will get called in the nextTick +// if it's been memoized, or wait for the fallback data if it wasn't +// found in the environment. +osenv.hostname(function (er, hostname) { + if (er) console.error('error looking up hostname') + else console.log('this machine calls itself %s', hostname) +}) +``` + +## osenv.hostname() + +The machine name. Calls `hostname` if not found. + +## osenv.user() + +The currently logged-in user. Calls `whoami` if not found. + +## osenv.prompt() + +Either PS1 on unix, or PROMPT on Windows. + +## osenv.tmpdir() + +The place where temporary files should be created. + +## osenv.home() + +No place like it. + +## osenv.path() + +An array of the places that the operating system will search for +executables. + +## osenv.editor() + +Return the executable name of the editor program. This uses the EDITOR +and VISUAL environment variables, and falls back to `vi` on Unix, or +`notepad.exe` on Windows. + +## osenv.shell() + +The SHELL on Unix, which Windows calls the ComSpec. Defaults to 'bash' +or 'cmd'. diff --git a/node_modules/fsevents/node_modules/osenv/osenv.js b/node_modules/fsevents/node_modules/osenv/osenv.js new file mode 100644 index 00000000..702a95b9 --- /dev/null +++ b/node_modules/fsevents/node_modules/osenv/osenv.js @@ -0,0 +1,72 @@ +var isWindows = process.platform === 'win32' +var path = require('path') +var exec = require('child_process').exec +var osTmpdir = require('os-tmpdir') +var osHomedir = require('os-homedir') + +// looking up envs is a bit costly. +// Also, sometimes we want to have a fallback +// Pass in a callback to wait for the fallback on failures +// After the first lookup, always returns the same thing. +function memo (key, lookup, fallback) { + var fell = false + var falling = false + exports[key] = function (cb) { + var val = lookup() + if (!val && !fell && !falling && fallback) { + fell = true + falling = true + exec(fallback, function (er, output, stderr) { + falling = false + if (er) return // oh well, we tried + val = output.trim() + }) + } + exports[key] = function (cb) { + if (cb) process.nextTick(cb.bind(null, null, val)) + return val + } + if (cb && !falling) process.nextTick(cb.bind(null, null, val)) + return val + } +} + +memo('user', function () { + return ( isWindows + ? process.env.USERDOMAIN + '\\' + process.env.USERNAME + : process.env.USER + ) +}, 'whoami') + +memo('prompt', function () { + return isWindows ? 
process.env.PROMPT : process.env.PS1 +}) + +memo('hostname', function () { + return isWindows ? process.env.COMPUTERNAME : process.env.HOSTNAME +}, 'hostname') + +memo('tmpdir', function () { + return osTmpdir() +}) + +memo('home', function () { + return osHomedir() +}) + +memo('path', function () { + return (process.env.PATH || + process.env.Path || + process.env.path).split(isWindows ? ';' : ':') +}) + +memo('editor', function () { + return process.env.EDITOR || + process.env.VISUAL || + (isWindows ? 'notepad.exe' : 'vi') +}) + +memo('shell', function () { + return isWindows ? process.env.ComSpec || 'cmd' + : process.env.SHELL || 'bash' +}) diff --git a/node_modules/fsevents/node_modules/osenv/package.json b/node_modules/fsevents/node_modules/osenv/package.json new file mode 100644 index 00000000..de300e5b --- /dev/null +++ b/node_modules/fsevents/node_modules/osenv/package.json @@ -0,0 +1,76 @@ +{ + "_args": [ + [ + "osenv@0.1.5", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "osenv@0.1.5", + "_id": "osenv@0.1.5", + "_inBundle": false, + "_integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", + "_location": "/osenv", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "osenv@0.1.5", + "name": "osenv", + "escapedName": "osenv", + "rawSpec": "0.1.5", + "saveSpec": null, + "fetchSpec": "0.1.5" + }, + "_requiredBy": [ + "/nopt" + ], + "_resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", + "_spec": "0.1.5", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/npm/osenv/issues" + }, + "dependencies": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.0" + }, + "description": "Look up environment settings specific to different operating systems", + "devDependencies": { + "tap": "^11.1.0" + }, + "directories": { + "test": "test" + }, + "files": [ + "osenv.js" + ], + "homepage": "https://github.com/npm/osenv#readme", + "keywords": [ + "environment", + "variable", + "home", + "tmpdir", + "path", + "prompt", + "ps1" + ], + "license": "ISC", + "main": "osenv.js", + "name": "osenv", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/osenv.git" + }, + "scripts": { + "postpublish": "git push origin --all; git push origin --tags", + "postversion": "npm publish", + "preversion": "npm test", + "test": "tap test/*.js" + }, + "version": "0.1.5" +} diff --git a/node_modules/fsevents/node_modules/path-is-absolute/index.js b/node_modules/fsevents/node_modules/path-is-absolute/index.js new file mode 100644 index 00000000..22aa6c35 --- /dev/null +++ b/node_modules/fsevents/node_modules/path-is-absolute/index.js @@ -0,0 +1,20 @@ +'use strict'; + +function posix(path) { + return path.charAt(0) === '/'; +} + +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); + + // UNC paths are always absolute + return Boolean(result[2] || isUnc); +} + +module.exports = process.platform === 'win32' ? 
win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; diff --git a/node_modules/fsevents/node_modules/path-is-absolute/license b/node_modules/fsevents/node_modules/path-is-absolute/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/fsevents/node_modules/path-is-absolute/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/path-is-absolute/package.json b/node_modules/fsevents/node_modules/path-is-absolute/package.json new file mode 100644 index 00000000..402f37ba --- /dev/null +++ b/node_modules/fsevents/node_modules/path-is-absolute/package.json @@ -0,0 +1,78 @@ +{ + "_args": [ + [ + "path-is-absolute@1.0.1", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "path-is-absolute@1.0.1", + "_id": "path-is-absolute@1.0.1", + "_inBundle": false, + "_integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "_location": "/path-is-absolute", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "path-is-absolute@1.0.1", + "name": "path-is-absolute", + "escapedName": "path-is-absolute", + "rawSpec": "1.0.1", + "saveSpec": null, + "fetchSpec": "1.0.1" + }, + "_requiredBy": [ + "/glob" + ], + "_resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "_spec": "1.0.1", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/path-is-absolute/issues" + }, + "description": "Node.js 0.12 path.isAbsolute() ponyfill", + "devDependencies": { + "xo": "^0.16.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/sindresorhus/path-is-absolute#readme", + "keywords": [ + "path", + "paths", + "file", + "dir", + "absolute", + "isabsolute", + "is-absolute", + "built-in", + "util", + "utils", + "core", + "ponyfill", + "polyfill", + "shim", + "is", + "detect", + "check" + ], + "license": "MIT", + "name": "path-is-absolute", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/path-is-absolute.git" + }, + "scripts": { + "test": "xo && node test.js" + }, + "version": "1.0.1" +} diff --git a/node_modules/fsevents/node_modules/path-is-absolute/readme.md b/node_modules/fsevents/node_modules/path-is-absolute/readme.md new file mode 100644 index 
00000000..8dbdf5fc --- /dev/null +++ b/node_modules/fsevents/node_modules/path-is-absolute/readme.md @@ -0,0 +1,59 @@ +# path-is-absolute [![Build Status](https://travis-ci.org/sindresorhus/path-is-absolute.svg?branch=master)](https://travis-ci.org/sindresorhus/path-is-absolute) + +> Node.js 0.12 [`path.isAbsolute()`](http://nodejs.org/api/path.html#path_path_isabsolute_path) [ponyfill](https://ponyfill.com) + + +## Install + +``` +$ npm install --save path-is-absolute +``` + + +## Usage + +```js +const pathIsAbsolute = require('path-is-absolute'); + +// Running on Linux +pathIsAbsolute('/home/foo'); +//=> true +pathIsAbsolute('C:/Users/foo'); +//=> false + +// Running on Windows +pathIsAbsolute('C:/Users/foo'); +//=> true +pathIsAbsolute('/home/foo'); +//=> false + +// Running on any OS +pathIsAbsolute.posix('/home/foo'); +//=> true +pathIsAbsolute.posix('C:/Users/foo'); +//=> false +pathIsAbsolute.win32('C:/Users/foo'); +//=> true +pathIsAbsolute.win32('/home/foo'); +//=> false +``` + + +## API + +See the [`path.isAbsolute()` docs](http://nodejs.org/api/path.html#path_path_isabsolute_path). + +### pathIsAbsolute(path) + +### pathIsAbsolute.posix(path) + +POSIX specific version. + +### pathIsAbsolute.win32(path) + +Windows specific version. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/process-nextick-args/index.js b/node_modules/fsevents/node_modules/process-nextick-args/index.js new file mode 100644 index 00000000..5f585e8e --- /dev/null +++ b/node_modules/fsevents/node_modules/process-nextick-args/index.js @@ -0,0 +1,44 @@ +'use strict'; + +if (!process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} + diff --git a/node_modules/fsevents/node_modules/process-nextick-args/license.md b/node_modules/fsevents/node_modules/process-nextick-args/license.md new file mode 100644 index 00000000..c67e3532 --- /dev/null +++ b/node_modules/fsevents/node_modules/process-nextick-args/license.md @@ -0,0 +1,19 @@ +# Copyright (c) 2015 Calvin Metcalf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.** diff --git a/node_modules/fsevents/node_modules/process-nextick-args/package.json b/node_modules/fsevents/node_modules/process-nextick-args/package.json new file mode 100644 index 00000000..cfd91239 --- /dev/null +++ b/node_modules/fsevents/node_modules/process-nextick-args/package.json @@ -0,0 +1,53 @@ +{ + "_args": [ + [ + "process-nextick-args@2.0.0", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "process-nextick-args@2.0.0", + "_id": "process-nextick-args@2.0.0", + "_inBundle": false, + "_integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==", + "_location": "/process-nextick-args", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "process-nextick-args@2.0.0", + "name": "process-nextick-args", + "escapedName": "process-nextick-args", + "rawSpec": "2.0.0", + "saveSpec": null, + "fetchSpec": "2.0.0" + }, + "_requiredBy": [ + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", + "_spec": "2.0.0", + "_where": "/Users/pipobscure/fsevents", + "author": "", + "bugs": { + "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" + }, + "description": "process.nextTick but always with args", + "devDependencies": { + "tap": "~0.2.6" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/calvinmetcalf/process-nextick-args", + "license": "MIT", + "main": "index.js", + "name": "process-nextick-args", + "repository": { + "type": "git", + "url": "git+https://github.com/calvinmetcalf/process-nextick-args.git" + }, + "scripts": { + "test": "node test.js" + }, + "version": "2.0.0" +} diff --git a/node_modules/fsevents/node_modules/process-nextick-args/readme.md b/node_modules/fsevents/node_modules/process-nextick-args/readme.md new file mode 100644 index 00000000..ecb432c9 --- /dev/null +++ b/node_modules/fsevents/node_modules/process-nextick-args/readme.md @@ -0,0 +1,18 @@ +process-nextick-args +===== + +[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) + +```bash +npm install --save process-nextick-args +``` + +Always be able to pass arguments to process.nextTick, no matter the platform + +```js +var pna = require('process-nextick-args'); + +pna.nextTick(function (a, b, c) { + console.log(a, b, c); +}, 'step', 3, 'profit'); +``` diff --git a/node_modules/fsevents/node_modules/rc/LICENSE.APACHE2 b/node_modules/fsevents/node_modules/rc/LICENSE.APACHE2 new file mode 100644 index 00000000..6366c047 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/LICENSE.APACHE2 @@ -0,0 +1,15 @@ +Apache License, Version 2.0 + +Copyright (c) 2011 Dominic Tarr + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/node_modules/fsevents/node_modules/rc/LICENSE.BSD b/node_modules/fsevents/node_modules/rc/LICENSE.BSD new file mode 100644 index 00000000..96bb796a --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/LICENSE.BSD @@ -0,0 +1,26 @@ +Copyright (c) 2013, Dominic Tarr +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The views and conclusions contained in the software and documentation are those +of the authors and should not be interpreted as representing official policies, +either expressed or implied, of the FreeBSD Project. diff --git a/node_modules/fsevents/node_modules/rc/LICENSE.MIT b/node_modules/fsevents/node_modules/rc/LICENSE.MIT new file mode 100644 index 00000000..6eafbd73 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/LICENSE.MIT @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2011 Dominic Tarr + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/rc/README.md b/node_modules/fsevents/node_modules/rc/README.md new file mode 100644 index 00000000..e6522e26 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/README.md @@ -0,0 +1,227 @@ +# rc + +The non-configurable configuration loader for lazy people. + +## Usage + +The only option is to pass rc the name of your app, and your default configuration. + +```javascript +var conf = require('rc')(appname, { + //defaults go here. + port: 2468, + + //defaults which are objects will be merged, not replaced + views: { + engine: 'jade' + } +}); +``` + +`rc` will return your configuration options merged with the defaults you specify. +If you pass in a predefined defaults object, it will be mutated: + +```javascript +var conf = {}; +require('rc')(appname, conf); +``` + +If `rc` finds any config files for your app, the returned config object will have +a `configs` array containing their paths: + +```javascript +var appCfg = require('rc')(appname, conf); +appCfg.configs[0] // /etc/appnamerc +appCfg.configs[1] // /home/dominictarr/.config/appname +appCfg.config // same as appCfg.configs[appCfg.configs.length - 1] +``` + +## Standards + +Given your application name (`appname`), rc will look in all the obvious places for configuration. + + * command line arguments, parsed by minimist _(e.g. `--foo baz`, also nested: `--foo.bar=baz`)_ + * environment variables prefixed with `${appname}_` + * or use "\_\_" to indicate nested properties
_(e.g. `appname_foo__bar__baz` => `foo.bar.baz`)_ + * if you passed an option `--config file` then from that file + * a local `.${appname}rc` or the first found looking in `./ ../ ../../ ../../../` etc. + * `$HOME/.${appname}rc` + * `$HOME/.${appname}/config` + * `$HOME/.config/${appname}` + * `$HOME/.config/${appname}/config` + * `/etc/${appname}rc` + * `/etc/${appname}/config` + * the defaults object you passed in. + +All configuration sources that were found will be flattened into one object, +so that sources **earlier** in this list override later ones. + + +## Configuration File Formats + +Configuration files (e.g. `.appnamerc`) may be in either [json](http://json.org/example) or [ini](http://en.wikipedia.org/wiki/INI_file) format. **No** file extension (`.json` or `.ini`) should be used. The example configurations below are equivalent: + + +#### Formatted as `ini` + +``` +; You can include comments in `ini` format if you want. + +dependsOn=0.10.0 + + +; `rc` has built-in support for ini sections, see? + +[commands] + www = ./commands/www + console = ./commands/repl + + +; You can even do nested sections + +[generators.options] + engine = ejs + +[generators.modules] + new = generate-new + engine = generate-backend + +``` + +#### Formatted as `json` + +```javascript +{ + // You can even comment your JSON, if you want + "dependsOn": "0.10.0", + "commands": { + "www": "./commands/www", + "console": "./commands/repl" + }, + "generators": { + "options": { + "engine": "ejs" + }, + "modules": { + "new": "generate-new", + "backend": "generate-backend" + } + } +} +``` + +Comments are stripped from JSON config via [strip-json-comments](https://github.com/sindresorhus/strip-json-comments). + +> Since ini, and env variables do not have a standard for types, your application needs be prepared for strings. + +To ensure that string representations of booleans and numbers are always converted into their proper types (especially useful if you intend to do strict `===` comparisons), consider using a module such as [parse-strings-in-object](https://github.com/anselanza/parse-strings-in-object) to wrap the config object returned from rc. + + +## Simple example demonstrating precedence +Assume you have an application like this (notice the hard-coded defaults passed to rc): +``` +const conf = require('rc')('myapp', { + port: 12345, + mode: 'test' +}); + +console.log(JSON.stringify(conf, null, 2)); +``` +You also have a file `config.json`, with these contents: +``` +{ + "port": 9000, + "foo": "from config json", + "something": "else" +} +``` +And a file `.myapprc` in the same folder, with these contents: +``` +{ + "port": "3001", + "foo": "bar" +} +``` +Here is the expected output from various commands: + +`node .` +``` +{ + "port": "3001", + "mode": "test", + "foo": "bar", + "_": [], + "configs": [ + "/Users/stephen/repos/conftest/.myapprc" + ], + "config": "/Users/stephen/repos/conftest/.myapprc" +} +``` +*Default `mode` from hard-coded object is retained, but port is overridden by `.myapprc` file (automatically found based on appname match), and `foo` is added.* + + +`node . --foo baz` +``` +{ + "port": "3001", + "mode": "test", + "foo": "baz", + "_": [], + "configs": [ + "/Users/stephen/repos/conftest/.myapprc" + ], + "config": "/Users/stephen/repos/conftest/.myapprc" +} +``` +*Same result as above but `foo` is overridden because command-line arguments take precedence over `.myapprc` file.* + +`node . 
--foo barbar --config config.json` +``` +{ + "port": 9000, + "mode": "test", + "foo": "barbar", + "something": "else", + "_": [], + "config": "config.json", + "configs": [ + "/Users/stephen/repos/conftest/.myapprc", + "config.json" + ] +} +``` +*Now the `port` comes from the `config.json` file specified (overriding the value from `.myapprc`), and `foo` value is overriden by command-line despite also being specified in the `config.json` file.* + + + +## Advanced Usage + +#### Pass in your own `argv` + +You may pass in your own `argv` as the third argument to `rc`. This is in case you want to [use your own command-line opts parser](https://github.com/dominictarr/rc/pull/12). + +```javascript +require('rc')(appname, defaults, customArgvParser); +``` + +## Pass in your own parser + +If you have a special need to use a non-standard parser, +you can do so by passing in the parser as the 4th argument. +(leave the 3rd as null to get the default args parser) + +```javascript +require('rc')(appname, defaults, null, parser); +``` + +This may also be used to force a more strict format, +such as strict, valid JSON only. + +## Note on Performance + +`rc` is running `fs.statSync`-- so make sure you don't use it in a hot code path (e.g. a request handler) + + +## License + +Multi-licensed under the two-clause BSD License, MIT License, or Apache License, version 2.0 diff --git a/node_modules/fsevents/node_modules/rc/browser.js b/node_modules/fsevents/node_modules/rc/browser.js new file mode 100644 index 00000000..8c230c5c --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/browser.js @@ -0,0 +1,7 @@ + +// when this is loaded into the browser, +// just use the defaults... + +module.exports = function (name, defaults) { + return defaults +} diff --git a/node_modules/fsevents/node_modules/rc/cli.js b/node_modules/fsevents/node_modules/rc/cli.js new file mode 100755 index 00000000..ab05b607 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/cli.js @@ -0,0 +1,4 @@ +#! /usr/bin/env node +var rc = require('./index') + +console.log(JSON.stringify(rc(process.argv[2]), false, 2)) diff --git a/node_modules/fsevents/node_modules/rc/index.js b/node_modules/fsevents/node_modules/rc/index.js new file mode 100755 index 00000000..65eb47af --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/index.js @@ -0,0 +1,53 @@ +var cc = require('./lib/utils') +var join = require('path').join +var deepExtend = require('deep-extend') +var etc = '/etc' +var win = process.platform === "win32" +var home = win + ? process.env.USERPROFILE + : process.env.HOME + +module.exports = function (name, defaults, argv, parse) { + if('string' !== typeof name) + throw new Error('rc(name): name *must* be string') + if(!argv) + argv = require('minimist')(process.argv.slice(2)) + defaults = ( + 'string' === typeof defaults + ? cc.json(defaults) : defaults + ) || {} + + parse = parse || cc.parse + + var env = cc.env(name + '_') + + var configs = [defaults] + var configFiles = [] + function addConfigFile (file) { + if (configFiles.indexOf(file) >= 0) return + var fileConfig = cc.file(file) + if (fileConfig) { + configs.push(parse(fileConfig)) + configFiles.push(file) + } + } + + // which files do we look at? + if (!win) + [join(etc, name, 'config'), + join(etc, name + 'rc')].forEach(addConfigFile) + if (home) + [join(home, '.config', name, 'config'), + join(home, '.config', name), + join(home, '.' + name, 'config'), + join(home, '.' 
+ name + 'rc')].forEach(addConfigFile) + addConfigFile(cc.find('.'+name+'rc')) + if (env.config) addConfigFile(env.config) + if (argv.config) addConfigFile(argv.config) + + return deepExtend.apply(null, configs.concat([ + env, + argv, + configFiles.length ? {configs: configFiles, config: configFiles[configFiles.length - 1]} : undefined, + ])) +} diff --git a/node_modules/fsevents/node_modules/rc/lib/utils.js b/node_modules/fsevents/node_modules/rc/lib/utils.js new file mode 100644 index 00000000..8b3beffa --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/lib/utils.js @@ -0,0 +1,104 @@ +'use strict'; +var fs = require('fs') +var ini = require('ini') +var path = require('path') +var stripJsonComments = require('strip-json-comments') + +var parse = exports.parse = function (content) { + + //if it ends in .json or starts with { then it must be json. + //must be done this way, because ini accepts everything. + //can't just try and parse it and let it throw if it's not ini. + //everything is ini. even json with a syntax error. + + if(/^\s*{/.test(content)) + return JSON.parse(stripJsonComments(content)) + return ini.parse(content) + +} + +var file = exports.file = function () { + var args = [].slice.call(arguments).filter(function (arg) { return arg != null }) + + //path.join breaks if it's a not a string, so just skip this. + for(var i in args) + if('string' !== typeof args[i]) + return + + var file = path.join.apply(null, args) + var content + try { + return fs.readFileSync(file,'utf-8') + } catch (err) { + return + } +} + +var json = exports.json = function () { + var content = file.apply(null, arguments) + return content ? parse(content) : null +} + +var env = exports.env = function (prefix, env) { + env = env || process.env + var obj = {} + var l = prefix.length + for(var k in env) { + if(k.toLowerCase().indexOf(prefix.toLowerCase()) === 0) { + + var keypath = k.substring(l).split('__') + + // Trim empty strings from keypath array + var _emptyStringIndex + while ((_emptyStringIndex=keypath.indexOf('')) > -1) { + keypath.splice(_emptyStringIndex, 1) + } + + var cursor = obj + keypath.forEach(function _buildSubObj(_subkey,i){ + + // (check for _subkey first so we ignore empty strings) + // (check for cursor to avoid assignment to primitive objects) + if (!_subkey || typeof cursor !== 'object') + return + + // If this is the last key, just stuff the value in there + // Assigns actual value from env variable to final key + // (unless it's just an empty string- in that case use the last valid key) + if (i === keypath.length-1) + cursor[_subkey] = env[k] + + + // Build sub-object if nothing already exists at the keypath + if (cursor[_subkey] === undefined) + cursor[_subkey] = {} + + // Increment cursor used to track the object at the current depth + cursor = cursor[_subkey] + + }) + + } + + } + + return obj +} + +var find = exports.find = function () { + var rel = path.join.apply(null, [].slice.call(arguments)) + + function find(start, rel) { + var file = path.join(start, rel) + try { + fs.statSync(file) + return file + } catch (err) { + if(path.dirname(start) !== start) // root + return find(path.dirname(start), rel) + } + } + return find(process.cwd(), rel) +} + + diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/.travis.yml b/node_modules/fsevents/node_modules/rc/node_modules/minimist/.travis.yml new file mode 100644 index 00000000..74c57bf1 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/.travis.yml @@ -0,0 +1,8 @@ +language: 
node_js +node_js: + - "0.8" + - "0.10" + - "0.12" + - "iojs" +before_install: + - npm install -g npm@~1.4.6 diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/LICENSE b/node_modules/fsevents/node_modules/rc/node_modules/minimist/LICENSE new file mode 100644 index 00000000..ee27ba4b --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/example/parse.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/example/parse.js new file mode 100644 index 00000000..abff3e8e --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/example/parse.js @@ -0,0 +1,2 @@ +var argv = require('../')(process.argv.slice(2)); +console.dir(argv); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/index.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/index.js new file mode 100644 index 00000000..6a0559d5 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/index.js @@ -0,0 +1,236 @@ +module.exports = function (args, opts) { + if (!opts) opts = {}; + + var flags = { bools : {}, strings : {}, unknownFn: null }; + + if (typeof opts['unknown'] === 'function') { + flags.unknownFn = opts['unknown']; + } + + if (typeof opts['boolean'] === 'boolean' && opts['boolean']) { + flags.allBools = true; + } else { + [].concat(opts['boolean']).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + }); + } + + var aliases = {}; + Object.keys(opts.alias || {}).forEach(function (key) { + aliases[key] = [].concat(opts.alias[key]); + aliases[key].forEach(function (x) { + aliases[x] = [key].concat(aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + + [].concat(opts.string).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + if (aliases[key]) { + flags.strings[aliases[key]] = true; + } + }); + + var defaults = opts['default'] || {}; + + var argv = { _ : [] }; + Object.keys(flags.bools).forEach(function (key) { + setArg(key, defaults[key] === undefined ? 
false : defaults[key]); + }); + + var notFlags = []; + + if (args.indexOf('--') !== -1) { + notFlags = args.slice(args.indexOf('--')+1); + args = args.slice(0, args.indexOf('--')); + } + + function argDefined(key, arg) { + return (flags.allBools && /^--[^=]+$/.test(arg)) || + flags.strings[key] || flags.bools[key] || aliases[key]; + } + + function setArg (key, val, arg) { + if (arg && flags.unknownFn && !argDefined(key, arg)) { + if (flags.unknownFn(arg) === false) return; + } + + var value = !flags.strings[key] && isNumber(val) + ? Number(val) : val + ; + setKey(argv, key.split('.'), value); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), value); + }); + } + + function setKey (obj, keys, value) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + if (o[key] === undefined) o[key] = {}; + o = o[key]; + }); + + var key = keys[keys.length - 1]; + if (o[key] === undefined || flags.bools[key] || typeof o[key] === 'boolean') { + o[key] = value; + } + else if (Array.isArray(o[key])) { + o[key].push(value); + } + else { + o[key] = [ o[key], value ]; + } + } + + function aliasIsBoolean(key) { + return aliases[key].some(function (x) { + return flags.bools[x]; + }); + } + + for (var i = 0; i < args.length; i++) { + var arg = args[i]; + + if (/^--.+=/.test(arg)) { + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. See: + // http://stackoverflow.com/a/1068308/13216 + var m = arg.match(/^--([^=]+)=([\s\S]*)$/); + var key = m[1]; + var value = m[2]; + if (flags.bools[key]) { + value = value !== 'false'; + } + setArg(key, value, arg); + } + else if (/^--no-.+/.test(arg)) { + var key = arg.match(/^--no-(.+)/)[1]; + setArg(key, false, arg); + } + else if (/^--.+/.test(arg)) { + var key = arg.match(/^--(.+)/)[1]; + var next = args[i + 1]; + if (next !== undefined && !/^-/.test(next) + && !flags.bools[key] + && !flags.allBools + && (aliases[key] ? !aliasIsBoolean(key) : true)) { + setArg(key, next, arg); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next === 'true', arg); + i++; + } + else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } + else if (/^-[^-]+/.test(arg)) { + var letters = arg.slice(1,-1).split(''); + + var broken = false; + for (var j = 0; j < letters.length; j++) { + var next = arg.slice(j+2); + + if (next === '-') { + setArg(letters[j], next, arg) + continue; + } + + if (/[A-Za-z]/.test(letters[j]) && /=/.test(next)) { + setArg(letters[j], next.split('=')[1], arg); + broken = true; + break; + } + + if (/[A-Za-z]/.test(letters[j]) + && /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) { + setArg(letters[j], next, arg); + broken = true; + break; + } + + if (letters[j+1] && letters[j+1].match(/\W/)) { + setArg(letters[j], arg.slice(j+2), arg); + broken = true; + break; + } + else { + setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg); + } + } + + var key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1]) + && !flags.bools[key] + && (aliases[key] ? !aliasIsBoolean(key) : true)) { + setArg(key, args[i+1], arg); + i++; + } + else if (args[i+1] && /true|false/.test(args[i+1])) { + setArg(key, args[i+1] === 'true', arg); + i++; + } + else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } + } + else { + if (!flags.unknownFn || flags.unknownFn(arg) !== false) { + argv._.push( + flags.strings['_'] || !isNumber(arg) ? 
arg : Number(arg) + ); + } + if (opts.stopEarly) { + argv._.push.apply(argv._, args.slice(i + 1)); + break; + } + } + } + + Object.keys(defaults).forEach(function (key) { + if (!hasKey(argv, key.split('.'))) { + setKey(argv, key.split('.'), defaults[key]); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), defaults[key]); + }); + } + }); + + if (opts['--']) { + argv['--'] = new Array(); + notFlags.forEach(function(key) { + argv['--'].push(key); + }); + } + else { + notFlags.forEach(function(key) { + argv._.push(key); + }); + } + + return argv; +}; + +function hasKey (obj, keys) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + o = (o[key] || {}); + }); + + var key = keys[keys.length - 1]; + return key in o; +} + +function isNumber (x) { + if (typeof x === 'number') return true; + if (/^0x[0-9a-f]+$/i.test(x)) return true; + return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); +} + diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/package.json b/node_modules/fsevents/node_modules/rc/node_modules/minimist/package.json new file mode 100644 index 00000000..de0414d7 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/package.json @@ -0,0 +1,76 @@ +{ + "_args": [ + [ + "minimist@1.2.0", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "minimist@1.2.0", + "_id": "minimist@1.2.0", + "_inBundle": false, + "_integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", + "_location": "/rc/minimist", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "minimist@1.2.0", + "name": "minimist", + "escapedName": "minimist", + "rawSpec": "1.2.0", + "saveSpec": null, + "fetchSpec": "1.2.0" + }, + "_requiredBy": [ + "/rc" + ], + "_resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "_spec": "1.2.0", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "bugs": { + "url": "https://github.com/substack/minimist/issues" + }, + "description": "parse argument options", + "devDependencies": { + "covert": "^1.0.0", + "tap": "~0.4.0", + "tape": "^3.5.0" + }, + "homepage": "https://github.com/substack/minimist", + "keywords": [ + "argv", + "getopt", + "parser", + "optimist" + ], + "license": "MIT", + "main": "index.js", + "name": "minimist", + "repository": { + "type": "git", + "url": "git://github.com/substack/minimist.git" + }, + "scripts": { + "coverage": "covert test/*.js", + "test": "tap test/*.js" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/6..latest", + "ff/5", + "firefox/latest", + "chrome/10", + "chrome/latest", + "safari/5.1", + "safari/latest", + "opera/12" + ] + }, + "version": "1.2.0" +} diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/readme.markdown b/node_modules/fsevents/node_modules/rc/node_modules/minimist/readme.markdown new file mode 100644 index 00000000..30a74cf8 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/readme.markdown @@ -0,0 +1,91 @@ +# minimist + +parse argument options + +This module is the guts of optimist's argument parser without all the +fanciful decoration. 
+ +[![browser support](https://ci.testling.com/substack/minimist.png)](http://ci.testling.com/substack/minimist) + +[![build status](https://secure.travis-ci.org/substack/minimist.png)](http://travis-ci.org/substack/minimist) + +# example + +``` js +var argv = require('minimist')(process.argv.slice(2)); +console.dir(argv); +``` + +``` +$ node example/parse.js -a beep -b boop +{ _: [], a: 'beep', b: 'boop' } +``` + +``` +$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz +{ _: [ 'foo', 'bar', 'baz' ], + x: 3, + y: 4, + n: 5, + a: true, + b: true, + c: true, + beep: 'boop' } +``` + +# methods + +``` js +var parseArgs = require('minimist') +``` + +## var argv = parseArgs(args, opts={}) + +Return an argument object `argv` populated with the array arguments from `args`. + +`argv._` contains all the arguments that didn't have an option associated with +them. + +Numeric-looking arguments will be returned as numbers unless `opts.string` or +`opts.boolean` is set for that argument name. + +Any arguments after `'--'` will not be parsed and will end up in `argv._`. + +options can be: + +* `opts.string` - a string or array of strings of argument names to always treat as +strings +* `opts.boolean` - a boolean, string or array of strings to always treat as +booleans. If `true`, all double-hyphenated arguments without equals signs are treated +as boolean (e.g. affects `--foo`, not `-f` or `--foo=bar`) +* `opts.alias` - an object mapping string names to strings or arrays of string +argument names to use as aliases +* `opts.default` - an object mapping string argument names to default values +* `opts.stopEarly` - when true, populate `argv._` with everything after the +first non-option +* `opts.unknown` - a function which is invoked with a command line parameter not +defined in the `opts` configuration object. If the function returns `false`, the +unknown option is not added to `argv`. +* `opts['--']` - when true, populate `argv._` with everything before the `--` +and `argv['--']` with everything after the `--`. Here's an example: + +``` +> require('./')('one two three -- four five --six'.split(' '), { '--': true }) +{ _: [ 'one', 'two', 'three' ], + '--': [ 'four', 'five', '--six' ] } +``` + +Note that with `opts['--']` set, parsing for arguments still stops after the +`--`. 
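+
+To see how these options combine, here is a small usage sketch (not part of the original readme; the file name `opts-demo.js` and the flag names are made up for illustration):
+
+``` js
+// opts-demo.js -- run with:
+//   node opts-demo.js --port 8080 -v --typo --name=dev extra -- --raw
+var parseArgs = require('minimist');
+
+var argv = parseArgs(process.argv.slice(2), {
+    string: ['name'],      // always keep `name` as a string
+    boolean: ['verbose'],  // parse `--verbose` as a boolean
+    alias: { v: 'verbose', p: 'port' },
+    default: { port: 3000 },
+    '--': true,            // collect everything after `--` separately
+    unknown: function (arg) {
+        // drop unrecognised `--flags` (like `--typo`) instead of adding them
+        return !/^--/.test(arg);
+    }
+});
+
+console.dir(argv);
+// { _: [ 'extra' ], verbose: true, v: true, port: 8080, p: 8080,
+//   name: 'dev', '--': [ '--raw' ] }
+```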
+ +# install + +With [npm](https://npmjs.org) do: + +``` +npm install minimist +``` + +# license + +MIT diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/all_bool.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/all_bool.js new file mode 100644 index 00000000..ac835483 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/all_bool.js @@ -0,0 +1,32 @@ +var parse = require('../'); +var test = require('tape'); + +test('flag boolean true (default all --args to boolean)', function (t) { + var argv = parse(['moo', '--honk', 'cow'], { + boolean: true + }); + + t.deepEqual(argv, { + honk: true, + _: ['moo', 'cow'] + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); + +test('flag boolean true only affects double hyphen arguments without equals signs', function (t) { + var argv = parse(['moo', '--honk', 'cow', '-p', '55', '--tacos=good'], { + boolean: true + }); + + t.deepEqual(argv, { + honk: true, + tacos: 'good', + p: 55, + _: ['moo', 'cow'] + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/bool.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/bool.js new file mode 100644 index 00000000..14b0717c --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/bool.js @@ -0,0 +1,166 @@ +var parse = require('../'); +var test = require('tape'); + +test('flag boolean default false', function (t) { + var argv = parse(['moo'], { + boolean: ['t', 'verbose'], + default: { verbose: false, t: false } + }); + + t.deepEqual(argv, { + verbose: false, + t: false, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); + +}); + +test('boolean groups', function (t) { + var argv = parse([ '-x', '-z', 'one', 'two', 'three' ], { + boolean: ['x','y','z'] + }); + + t.deepEqual(argv, { + x : true, + y : false, + z : true, + _ : [ 'one', 'two', 'three' ] + }); + + t.deepEqual(typeof argv.x, 'boolean'); + t.deepEqual(typeof argv.y, 'boolean'); + t.deepEqual(typeof argv.z, 'boolean'); + t.end(); +}); +test('boolean and alias with chainable api', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + herp: { alias: 'h', boolean: true } + }; + var aliasedArgv = parse(aliased, { + boolean: 'herp', + alias: { h: 'herp' } + }); + var propertyArgv = parse(regular, { + boolean: 'herp', + alias: { h: 'herp' } + }); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias with options hash', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + alias: { 'h': 'herp' }, + boolean: 'herp' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias array with options hash', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var alt = [ '--harp', 'derp' ]; + var opts = { + alias: { 'h': ['herp', 'harp'] }, + boolean: 'h' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var altPropertyArgv = parse(alt, opts); + var expected = { + harp: true, + 
herp: true, + h: true, + '_': [ 'derp' ] + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.same(altPropertyArgv, expected); + t.end(); +}); + +test('boolean and alias using explicit true', function (t) { + var aliased = [ '-h', 'true' ]; + var regular = [ '--herp', 'true' ]; + var opts = { + alias: { h: 'herp' }, + boolean: 'h' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +// regression, see https://github.com/substack/node-optimist/issues/71 +test('boolean and --x=true', function(t) { + var parsed = parse(['--boool', '--other=true'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'true'); + + parsed = parse(['--boool', '--other=false'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'false'); + t.end(); +}); + +test('boolean --boool=true', function (t) { + var parsed = parse(['--boool=true'], { + default: { + boool: false + }, + boolean: ['boool'] + }); + + t.same(parsed.boool, true); + t.end(); +}); + +test('boolean --boool=false', function (t) { + var parsed = parse(['--boool=false'], { + default: { + boool: true + }, + boolean: ['boool'] + }); + + t.same(parsed.boool, false); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dash.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dash.js new file mode 100644 index 00000000..5a4fa5be --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dash.js @@ -0,0 +1,31 @@ +var parse = require('../'); +var test = require('tape'); + +test('-', function (t) { + t.plan(5); + t.deepEqual(parse([ '-n', '-' ]), { n: '-', _: [] }); + t.deepEqual(parse([ '-' ]), { _: [ '-' ] }); + t.deepEqual(parse([ '-f-' ]), { f: '-', _: [] }); + t.deepEqual( + parse([ '-b', '-' ], { boolean: 'b' }), + { b: true, _: [ '-' ] } + ); + t.deepEqual( + parse([ '-s', '-' ], { string: 's' }), + { s: '-', _: [] } + ); +}); + +test('-a -- b', function (t) { + t.plan(3); + t.deepEqual(parse([ '-a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); +}); + +test('move arguments after the -- into their own `--` array', function(t) { + t.plan(1); + t.deepEqual( + parse([ '--name', 'John', 'before', '--', 'after' ], { '--': true }), + { name: 'John', _: [ 'before' ], '--': [ 'after' ] }); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/default_bool.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/default_bool.js new file mode 100644 index 00000000..780a3112 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/default_bool.js @@ -0,0 +1,35 @@ +var test = require('tape'); +var parse = require('../'); + +test('boolean default true', function (t) { + var argv = parse([], { + boolean: 'sometrue', + default: { sometrue: true } + }); + t.equal(argv.sometrue, true); + t.end(); +}); + +test('boolean default false', function (t) { + var argv = parse([], { + boolean: 'somefalse', + default: { somefalse: false } + }); + t.equal(argv.somefalse, false); + t.end(); +}); + +test('boolean default to null', function (t) { + var argv = parse([], { + boolean: 'maybe', + default: { maybe: null } + }); + 
t.equal(argv.maybe, null); + var argv = parse(['--maybe'], { + boolean: 'maybe', + default: { maybe: null } + }); + t.equal(argv.maybe, true); + t.end(); + +}) diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dotted.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dotted.js new file mode 100644 index 00000000..d8b3e856 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dotted.js @@ -0,0 +1,22 @@ +var parse = require('../'); +var test = require('tape'); + +test('dotted alias', function (t) { + var argv = parse(['--a.b', '22'], {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 22); + t.equal(argv.aa.bb, 22); + t.end(); +}); + +test('dotted default', function (t) { + var argv = parse('', {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 11); + t.equal(argv.aa.bb, 11); + t.end(); +}); + +test('dotted default with no alias', function (t) { + var argv = parse('', {default: {'a.b': 11}}); + t.equal(argv.a.b, 11); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/kv_short.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/kv_short.js new file mode 100644 index 00000000..f813b305 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/kv_short.js @@ -0,0 +1,16 @@ +var parse = require('../'); +var test = require('tape'); + +test('short -k=v' , function (t) { + t.plan(1); + + var argv = parse([ '-b=123' ]); + t.deepEqual(argv, { b: 123, _: [] }); +}); + +test('multi short -k=v' , function (t) { + t.plan(1); + + var argv = parse([ '-a=whatever', '-b=robots' ]); + t.deepEqual(argv, { a: 'whatever', b: 'robots', _: [] }); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/long.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/long.js new file mode 100644 index 00000000..5d3a1e09 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/long.js @@ -0,0 +1,31 @@ +var test = require('tape'); +var parse = require('../'); + +test('long opts', function (t) { + t.deepEqual( + parse([ '--bool' ]), + { bool : true, _ : [] }, + 'long boolean' + ); + t.deepEqual( + parse([ '--pow', 'xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture sp' + ); + t.deepEqual( + parse([ '--pow=xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture eq' + ); + t.deepEqual( + parse([ '--host', 'localhost', '--port', '555' ]), + { host : 'localhost', port : 555, _ : [] }, + 'long captures sp' + ); + t.deepEqual( + parse([ '--host=localhost', '--port=555' ]), + { host : 'localhost', port : 555, _ : [] }, + 'long captures eq' + ); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/num.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/num.js new file mode 100644 index 00000000..2cc77f4d --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/num.js @@ -0,0 +1,36 @@ +var parse = require('../'); +var test = require('tape'); + +test('nums', function (t) { + var argv = parse([ + '-x', '1234', + '-y', '5.67', + '-z', '1e7', + '-w', '10f', + '--hex', '0xdeadbeef', + '789' + ]); + t.deepEqual(argv, { + x : 1234, + y : 5.67, + z : 1e7, + w : '10f', + hex : 0xdeadbeef, + _ : [ 789 ] + }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv.y, 'number'); + t.deepEqual(typeof argv.z, 'number'); + t.deepEqual(typeof argv.w, 'string'); + 
t.deepEqual(typeof argv.hex, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); + +test('already a number', function (t) { + var argv = parse([ '-x', 1234, 789 ]); + t.deepEqual(argv, { x : 1234, _ : [ 789 ] }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse.js new file mode 100644 index 00000000..7b4a2a17 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse.js @@ -0,0 +1,197 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse args', function (t) { + t.deepEqual( + parse([ '--no-moo' ]), + { moo : false, _ : [] }, + 'no' + ); + t.deepEqual( + parse([ '-v', 'a', '-v', 'b', '-v', 'c' ]), + { v : ['a','b','c'], _ : [] }, + 'multi' + ); + t.end(); +}); + +test('comprehensive', function (t) { + t.deepEqual( + parse([ + '--name=meowmers', 'bare', '-cats', 'woo', + '-h', 'awesome', '--multi=quux', + '--key', 'value', + '-b', '--bool', '--no-meep', '--multi=baz', + '--', '--not-a-flag', 'eek' + ]), + { + c : true, + a : true, + t : true, + s : 'woo', + h : 'awesome', + b : true, + bool : true, + key : 'value', + multi : [ 'quux', 'baz' ], + meep : false, + name : 'meowmers', + _ : [ 'bare', '--not-a-flag', 'eek' ] + } + ); + t.end(); +}); + +test('flag boolean', function (t) { + var argv = parse([ '-t', 'moo' ], { boolean: 't' }); + t.deepEqual(argv, { t : true, _ : [ 'moo' ] }); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('flag boolean value', function (t) { + var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], { + boolean: [ 't', 'verbose' ], + default: { verbose: true } + }); + + t.deepEqual(argv, { + verbose: false, + t: true, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('newlines in params' , function (t) { + var args = parse([ '-s', "X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + + // reproduce in bash: + // VALUE="new + // line" + // node program.js --s="$VALUE" + args = parse([ "--s=X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + t.end(); +}); + +test('strings' , function (t) { + var s = parse([ '-s', '0001234' ], { string: 's' }).s; + t.equal(s, '0001234'); + t.equal(typeof s, 'string'); + + var x = parse([ '-x', '56' ], { string: 'x' }).x; + t.equal(x, '56'); + t.equal(typeof x, 'string'); + t.end(); +}); + +test('stringArgs', function (t) { + var s = parse([ ' ', ' ' ], { string: '_' })._; + t.same(s.length, 2); + t.same(typeof s[0], 'string'); + t.same(s[0], ' '); + t.same(typeof s[1], 'string'); + t.same(s[1], ' '); + t.end(); +}); + +test('empty strings', function(t) { + var s = parse([ '-s' ], { string: 's' }).s; + t.equal(s, ''); + t.equal(typeof s, 'string'); + + var str = parse([ '--str' ], { string: 'str' }).str; + t.equal(str, ''); + t.equal(typeof str, 'string'); + + var letters = parse([ '-art' ], { + string: [ 'a', 't' ] + }); + + t.equal(letters.a, ''); + t.equal(letters.r, true); + t.equal(letters.t, ''); + + t.end(); +}); + + +test('string and alias', function(t) { + var x = parse([ '--str', '000123' ], { + string: 's', + alias: { s: 'str' } + }); + + t.equal(x.str, '000123'); + t.equal(typeof x.str, 'string'); + t.equal(x.s, '000123'); + t.equal(typeof x.s, 'string'); + + var y = parse([ '-s', '000123' ], { + string: 'str', + alias: { str: 's' } + 
}); + + t.equal(y.str, '000123'); + t.equal(typeof y.str, 'string'); + t.equal(y.s, '000123'); + t.equal(typeof y.s, 'string'); + t.end(); +}); + +test('slashBreak', function (t) { + t.same( + parse([ '-I/foo/bar/baz' ]), + { I : '/foo/bar/baz', _ : [] } + ); + t.same( + parse([ '-xyz/foo/bar/baz' ]), + { x : true, y : true, z : '/foo/bar/baz', _ : [] } + ); + t.end(); +}); + +test('alias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: 'zoom' } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.f, 11); + t.end(); +}); + +test('multiAlias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: [ 'zm', 'zoom' ] } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.z, argv.zm); + t.equal(argv.f, 11); + t.end(); +}); + +test('nested dotted objects', function (t) { + var argv = parse([ + '--foo.bar', '3', '--foo.baz', '4', + '--foo.quux.quibble', '5', '--foo.quux.o_O', + '--beep.boop' + ]); + + t.same(argv.foo, { + bar : 3, + baz : 4, + quux : { + quibble : 5, + o_O : true + } + }); + t.same(argv.beep, { boop : true }); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse_modified.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse_modified.js new file mode 100644 index 00000000..ab620dc5 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse_modified.js @@ -0,0 +1,9 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse with modifier functions' , function (t) { + t.plan(1); + + var argv = parse([ '-b', '123' ], { boolean: 'b' }); + t.deepEqual(argv, { b: true, _: [123] }); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/short.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/short.js new file mode 100644 index 00000000..d513a1c2 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/short.js @@ -0,0 +1,67 @@ +var parse = require('../'); +var test = require('tape'); + +test('numeric short args', function (t) { + t.plan(2); + t.deepEqual(parse([ '-n123' ]), { n: 123, _: [] }); + t.deepEqual( + parse([ '-123', '456' ]), + { 1: true, 2: true, 3: 456, _: [] } + ); +}); + +test('short', function (t) { + t.deepEqual( + parse([ '-b' ]), + { b : true, _ : [] }, + 'short boolean' + ); + t.deepEqual( + parse([ 'foo', 'bar', 'baz' ]), + { _ : [ 'foo', 'bar', 'baz' ] }, + 'bare' + ); + t.deepEqual( + parse([ '-cats' ]), + { c : true, a : true, t : true, s : true, _ : [] }, + 'group' + ); + t.deepEqual( + parse([ '-cats', 'meow' ]), + { c : true, a : true, t : true, s : 'meow', _ : [] }, + 'short group next' + ); + t.deepEqual( + parse([ '-h', 'localhost' ]), + { h : 'localhost', _ : [] }, + 'short capture' + ); + t.deepEqual( + parse([ '-h', 'localhost', '-p', '555' ]), + { h : 'localhost', p : 555, _ : [] }, + 'short captures' + ); + t.end(); +}); + +test('mixed short bool and capture', function (t) { + t.same( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); + +test('short and long', function (t) { + t.deepEqual( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/stop_early.js 
b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/stop_early.js new file mode 100644 index 00000000..bdf9fbcb --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/stop_early.js @@ -0,0 +1,15 @@ +var parse = require('../'); +var test = require('tape'); + +test('stops parsing on the first non-option when stopEarly is set', function (t) { + var argv = parse(['--aaa', 'bbb', 'ccc', '--ddd'], { + stopEarly: true + }); + + t.deepEqual(argv, { + aaa: 'bbb', + _: ['ccc', '--ddd'] + }); + + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/unknown.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/unknown.js new file mode 100644 index 00000000..462a36bd --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/unknown.js @@ -0,0 +1,102 @@ +var parse = require('../'); +var test = require('tape'); + +test('boolean and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'true', '--derp', 'true' ]; + var regular = [ '--herp', 'true', '-d', 'true' ]; + var opts = { + alias: { h: 'herp' }, + boolean: 'h', + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('flag boolean true any double hyphen argument is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var argv = parse(['--honk', '--tacos=good', 'cow', '-p', '55'], { + boolean: true, + unknown: unknownFn + }); + t.same(unknown, ['--tacos=good', 'cow', '-p']); + t.same(argv, { + honk: true, + _: [] + }); + t.end(); +}); + +test('string and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'hello', '--derp', 'goodbye' ]; + var regular = [ '--herp', 'hello', '-d', 'moon' ]; + var opts = { + alias: { h: 'herp' }, + string: 'h', + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('default and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'hello' ]; + var regular = [ '--herp', 'hello' ]; + var opts = { + default: { 'h': 'bar' }, + alias: { 'h': 'herp' }, + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, []); + t.end(); + unknownFn(); // exercise fn for 100% coverage +}); + +test('value following -- is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '--bad', '--', 'good', 'arg' ]; + var opts = { + '--': true, + unknown: unknownFn + }; + var argv = parse(aliased, opts); + + t.same(unknown, ['--bad']); + t.same(argv, { + '--': ['good', 'arg'], + '_': [] + }) + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/whitespace.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/whitespace.js new file mode 100644 index 00000000..8a52a58c --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/whitespace.js @@ -0,0 +1,8 @@ +var parse = require('../'); +var test = 
require('tape'); + +test('whitespace should be whitespace' , function (t) { + t.plan(1); + var x = parse([ '-x', '\t' ]).x; + t.equal(x, '\t'); +}); diff --git a/node_modules/fsevents/node_modules/rc/package.json b/node_modules/fsevents/node_modules/rc/package.json new file mode 100644 index 00000000..149af9c2 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/package.json @@ -0,0 +1,67 @@ +{ + "_args": [ + [ + "rc@1.2.8", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "rc@1.2.8", + "_id": "rc@1.2.8", + "_inBundle": false, + "_integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "_location": "/rc", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "rc@1.2.8", + "name": "rc", + "escapedName": "rc", + "rawSpec": "1.2.8", + "saveSpec": null, + "fetchSpec": "1.2.8" + }, + "_requiredBy": [ + "/node-pre-gyp" + ], + "_resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "_spec": "1.2.8", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Dominic Tarr", + "email": "dominic.tarr@gmail.com", + "url": "dominictarr.com" + }, + "bin": { + "rc": "./cli.js" + }, + "browser": "browser.js", + "bugs": { + "url": "https://github.com/dominictarr/rc/issues" + }, + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "description": "hardwired configuration loader", + "homepage": "https://github.com/dominictarr/rc#readme", + "keywords": [ + "config", + "rc", + "unix", + "defaults" + ], + "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "main": "index.js", + "name": "rc", + "repository": { + "type": "git", + "url": "git+https://github.com/dominictarr/rc.git" + }, + "scripts": { + "test": "set -e; node test/test.js; node test/ini.js; node test/nested-env-vars.js" + }, + "version": "1.2.8" +} diff --git a/node_modules/fsevents/node_modules/rc/test/ini.js b/node_modules/fsevents/node_modules/rc/test/ini.js new file mode 100644 index 00000000..e6857f8b --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/test/ini.js @@ -0,0 +1,16 @@ +var cc =require('../lib/utils') +var INI = require('ini') +var assert = require('assert') + +function test(obj) { + + var _json, _ini + var json = cc.parse (_json = JSON.stringify(obj)) + var ini = cc.parse (_ini = INI.stringify(obj)) + console.log(_ini, _json) + assert.deepEqual(json, ini) +} + + +test({hello: true}) + diff --git a/node_modules/fsevents/node_modules/rc/test/nested-env-vars.js b/node_modules/fsevents/node_modules/rc/test/nested-env-vars.js new file mode 100644 index 00000000..0ecd1763 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/test/nested-env-vars.js @@ -0,0 +1,50 @@ + +var seed = Math.random(); +var n = 'rc'+ seed; +var N = 'RC'+ seed; +var assert = require('assert') + + +// Basic usage +process.env[n+'_someOpt__a'] = 42 +process.env[n+'_someOpt__x__'] = 99 +process.env[n+'_someOpt__a__b'] = 186 +process.env[n+'_someOpt__a__b__c'] = 243 +process.env[n+'_someOpt__x__y'] = 1862 +process.env[n+'_someOpt__z'] = 186577 + +// Should ignore empty strings from orphaned '__' +process.env[n+'_someOpt__z__x__'] = 18629 +process.env[n+'_someOpt__w__w__'] = 18629 + +// Leading '__' should ignore everything up to 'z' +process.env[n+'___z__i__'] = 9999 + +// should ignore case for config name section. 
+process.env[N+'_test_upperCase'] = 187 + +function testPrefix(prefix) { + var config = require('../')(prefix, { + option: true + }) + + console.log('\n\n------ nested-env-vars ------\n',{prefix: prefix}, '\n', config); + + assert.equal(config.option, true) + assert.equal(config.someOpt.a, 42) + assert.equal(config.someOpt.x, 99) + // Should not override `a` once it's been set + assert.equal(config.someOpt.a/*.b*/, 42) + // Should not override `x` once it's been set + assert.equal(config.someOpt.x/*.y*/, 99) + assert.equal(config.someOpt.z, 186577) + // Should not override `z` once it's been set + assert.equal(config.someOpt.z/*.x*/, 186577) + assert.equal(config.someOpt.w.w, 18629) + assert.equal(config.z.i, 9999) + + assert.equal(config.test_upperCase, 187) +} + +testPrefix(n); +testPrefix(N); diff --git a/node_modules/fsevents/node_modules/rc/test/test.js b/node_modules/fsevents/node_modules/rc/test/test.js new file mode 100644 index 00000000..4f633518 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/test/test.js @@ -0,0 +1,59 @@ + +var n = 'rc'+Math.random() +var assert = require('assert') + +process.env[n+'_envOption'] = 42 + +var config = require('../')(n, { + option: true +}) + +console.log(config) + +assert.equal(config.option, true) +assert.equal(config.envOption, 42) + +var customArgv = require('../')(n, { + option: true +}, { // nopt-like argv + option: false, + envOption: 24, + argv: { + remain: [], + cooked: ['--no-option', '--envOption', '24'], + original: ['--no-option', '--envOption=24'] + } +}) + +console.log(customArgv) + +assert.equal(customArgv.option, false) +assert.equal(customArgv.envOption, 24) + +var fs = require('fs') +var path = require('path') +var jsonrc = path.resolve('.' + n + 'rc'); + +fs.writeFileSync(jsonrc, [ + '{', + '// json overrides default', + '"option": false,', + '/* env overrides json */', + '"envOption": 24', + '}' +].join('\n')); + +var commentedJSON = require('../')(n, { + option: true +}) + +fs.unlinkSync(jsonrc); + +console.log(commentedJSON) + +assert.equal(commentedJSON.option, false) +assert.equal(commentedJSON.envOption, 42) + +assert.equal(commentedJSON.config, jsonrc) +assert.equal(commentedJSON.configs.length, 1) +assert.equal(commentedJSON.configs[0], jsonrc) diff --git a/node_modules/fsevents/node_modules/readable-stream/.travis.yml b/node_modules/fsevents/node_modules/readable-stream/.travis.yml new file mode 100644 index 00000000..40992555 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/.travis.yml @@ -0,0 +1,55 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test +script: "npm run $TASK" +env: + global: + - secure: 
rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/fsevents/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/fsevents/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 00000000..f478d58d --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/fsevents/node_modules/readable-stream/GOVERNANCE.md b/node_modules/fsevents/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 00000000..16ffb93f --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. 
+ +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. 
WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/fsevents/node_modules/readable-stream/LICENSE b/node_modules/fsevents/node_modules/readable-stream/LICENSE new file mode 100644 index 00000000..2873b3b2 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/fsevents/node_modules/readable-stream/README.md b/node_modules/fsevents/node_modules/readable-stream/README.md new file mode 100644 index 00000000..23fe3f3e --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/fsevents/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/fsevents/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 00000000..83275f19 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/fsevents/node_modules/readable-stream/duplex-browser.js b/node_modules/fsevents/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 00000000..f8b2db83 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/fsevents/node_modules/readable-stream/duplex.js b/node_modules/fsevents/node_modules/readable-stream/duplex.js new file mode 100644 index 00000000..46924cbf --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 00000000..a1ca813e --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
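+
+// Illustrative sketch (comment only, not from the upstream source): a minimal
+// Duplex built through the options form, assuming the module is loaded via the
+// package entry point ('readable-stream'). The writable side simply mirrors
+// written chunks back out through the readable side.
+//
+//   const { Duplex } = require('readable-stream');
+//   const echo = new Duplex({
+//     read() {},                  // data is supplied from write() below
+//     write(chunk, encoding, cb) {
+//       this.push(chunk);         // forward the written chunk to readers
+//       cb();
+//     }
+//   });
+//   echo.on('data', (chunk) => console.log('echoed:', chunk.toString()));
+//   echo.write('ping');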
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 00000000..a9c83588 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 00000000..bf34ac65 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
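+
+// Illustrative sketch (comment only, not from the upstream source): a producer
+// that honors the boolean returned by push(). "source" here is a hypothetical
+// object with pause()/resume() and a 'chunk' event, not an API of this module.
+//
+//   source.on('chunk', (chunk) => {
+//     if (!readable.push(chunk)) source.pause(); // buffer reached highWaterMark
+//   });
+//   readable._read = () => source.resume();      // downstream asked for more data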
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
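+
+// Illustrative sketch (comment only, not from the upstream source): switching a
+// stream between flowing and paused mode from user code.
+//
+//   readable.on('data', onData);   // attaching a 'data' listener starts flowing mode
+//   readable.pause();              // 'data' events stop; readable.isPaused() === true
+//   setTimeout(() => readable.resume(), 1000); // resume flowing after one second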
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
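+
+// Illustrative note (comment only, not from the upstream source): with two
+// buffered chunks of 3 and 5 bytes and no string decoder, fromList(3, state)
+// hits the "perfect match" branch and returns the head chunk unchanged, while
+// fromList(4, state) copies the head plus one byte of the next chunk via
+// copyFromBuffer() and leaves the remaining 4 bytes in the list.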
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 00000000..5d1f8b87 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. 
Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
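+
+// Illustrative sketch (comment only, not from the upstream source): the options
+// form of the override described above, assuming the package entry point
+// ('readable-stream').
+//
+//   const { Transform } = require('readable-stream');
+//   const upper = new Transform({
+//     transform(chunk, encoding, cb) {
+//       cb(null, chunk.toString().toUpperCase()); // push the result and finish the chunk
+//     }
+//   });
+//   process.stdin.pipe(upper).pipe(process.stdout);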
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 00000000..b3f4e85a --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,687 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
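+
+// Illustrative sketch (comment only, not from the upstream source): wiring up
+// the async _write() contract through the options form and reacting to the
+// 'drain' event, assuming the package entry point ('readable-stream').
+//
+//   const { Writable } = require('readable-stream');
+//   const sink = new Writable({
+//     write(chunk, encoding, cb) {
+//       setTimeout(cb, 10); // pretend the chunk was flushed to a slow target
+//     }
+//   });
+//   const ok = sink.write(Buffer.alloc(32 * 1024)); // false: exceeds the 16kb default highWaterMark
+//   if (!ok) sink.once('drain', () => sink.write('more'));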
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. 
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
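+  // Editor's note, not part of upstream readable-stream: a minimal usage
+  // sketch of the end() flow implemented here, for a hypothetical stream ws:
+  //
+  //   ws.end('last chunk', 'utf8', function () {
+  //     // runs once 'finish' fires, i.e. after every buffered write drained
+  //   });
+  //
+  // The optional chunk is written above, any outstanding cork() is released,
+  // and the callback is attached to 'finish' by endWritable() below.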
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 00000000..aefc68bd --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,79 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + 
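+    // Editor's note, not part of upstream readable-stream: BufferList is a
+    // plain singly linked FIFO, e.g.
+    //
+    //   var bl = new BufferList();
+    //   bl.push(Buffer.from('ab'));
+    //   bl.push(Buffer.from('cd'));
+    //   bl.concat(4);  // => <Buffer 61 62 63 64> (does not consume entries)
+    //   bl.shift();    // => <Buffer 61 62> (oldest entry first)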
var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 00000000..5a0a0d88 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,74 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 00000000..9332a3fd --- /dev/null +++ 
b/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 00000000..ce2ad5b6 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/fsevents/node_modules/readable-stream/package.json b/node_modules/fsevents/node_modules/readable-stream/package.json new file mode 100644 index 00000000..62332398 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/package.json @@ -0,0 +1,84 @@ +{ + "_args": [ + [ + "readable-stream@2.3.6", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "readable-stream@2.3.6", + "_id": "readable-stream@2.3.6", + "_inBundle": false, + "_integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "_location": "/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "readable-stream@2.3.6", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "2.3.6", + "saveSpec": null, + "fetchSpec": "2.3.6" + }, + "_requiredBy": [ + "/are-we-there-yet" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "_spec": "2.3.6", + "_where": "/Users/pipobscure/fsevents", + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js" + }, + "version": "2.3.6" +} diff --git a/node_modules/fsevents/node_modules/readable-stream/passthrough.js b/node_modules/fsevents/node_modules/readable-stream/passthrough.js new file mode 100644 index 00000000..ffd791d7 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git a/node_modules/fsevents/node_modules/readable-stream/readable-browser.js 
b/node_modules/fsevents/node_modules/readable-stream/readable-browser.js new file mode 100644 index 00000000..e5037259 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/fsevents/node_modules/readable-stream/readable.js b/node_modules/fsevents/node_modules/readable-stream/readable.js new file mode 100644 index 00000000..ec89ec53 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/fsevents/node_modules/readable-stream/transform.js b/node_modules/fsevents/node_modules/readable-stream/transform.js new file mode 100644 index 00000000..b1baba26 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/node_modules/fsevents/node_modules/readable-stream/writable-browser.js b/node_modules/fsevents/node_modules/readable-stream/writable-browser.js new file mode 100644 index 00000000..ebdde6a8 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/fsevents/node_modules/readable-stream/writable.js b/node_modules/fsevents/node_modules/readable-stream/writable.js new file mode 100644 index 00000000..3211a6f8 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/node_modules/fsevents/node_modules/rimraf/LICENSE b/node_modules/fsevents/node_modules/rimraf/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/fsevents/node_modules/rimraf/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/rimraf/README.md b/node_modules/fsevents/node_modules/rimraf/README.md new file mode 100644 index 00000000..423b8cf8 --- /dev/null +++ b/node_modules/fsevents/node_modules/rimraf/README.md @@ -0,0 +1,101 @@ +[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies) + +The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node. + +Install with `npm install rimraf`, or just drop rimraf.js somewhere. + +## API + +`rimraf(f, [opts], callback)` + +The first parameter will be interpreted as a globbing pattern for files. If you +want to disable globbing you can do so with `opts.disableGlob` (defaults to +`false`). This might be handy, for instance, if you have filenames that contain +globbing wildcard characters. + +The callback will be called with an error if there is one. Certain +errors are handled for you: + +* Windows: `EBUSY` and `ENOTEMPTY` - rimraf will back off a maximum of + `opts.maxBusyTries` times before giving up, adding 100ms of wait + between each attempt. The default `maxBusyTries` is 3. +* `ENOENT` - If the file doesn't exist, rimraf will return + successfully, since your desired outcome is already the case. +* `EMFILE` - Since `readdir` requires opening a file descriptor, it's + possible to hit `EMFILE` if too many file descriptors are in use. + In the sync case, there's nothing to be done for this. But in the + async case, rimraf will gradually back off with timeouts up to + `opts.emfileWait` ms, which defaults to 1000. + +## options + +* unlink, chmod, stat, lstat, rmdir, readdir, + unlinkSync, chmodSync, statSync, lstatSync, rmdirSync, readdirSync + + In order to use a custom file system library, you can override + specific fs functions on the options object. + + If any of these functions are present on the options object, then + the supplied function will be used instead of the default fs + method. + + Sync methods are only relevant for `rimraf.sync()`, of course. + + For example: + + ```javascript + var myCustomFS = require('some-custom-fs') + + rimraf('some-thing', myCustomFS, callback) + ``` + +* maxBusyTries + + If an `EBUSY`, `ENOTEMPTY`, or `EPERM` error code is encountered + on Windows systems, then rimraf will retry with a linear backoff + wait of 100ms longer on each try. The default maxBusyTries is 3. + + Only relevant for async usage. + +* emfileWait + + If an `EMFILE` error is encountered, then rimraf will retry + repeatedly with a linear backoff of 1ms longer on each try, until + the timeout counter hits this max. The default limit is 1000. + + If you repeatedly encounter `EMFILE` errors, then consider using + [graceful-fs](http://npm.im/graceful-fs) in your program. + + Only relevant for async usage. 
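+
+An editor's illustrative aside (not part of the upstream rimraf README): the
+options above are plain properties on a single options object. A minimal
+sketch, assuming a hypothetical `build` directory full of log files:
+
+```javascript
+var rimraf = require('rimraf')
+
+// glob pattern, with more EBUSY/ENOTEMPTY/EPERM retries on Windows
+rimraf('build/**/*.log', { maxBusyTries: 10 }, function (er) {
+  if (er) throw er
+
+  // literal path, globbing disabled entirely
+  rimraf('build', { disableGlob: true }, function (er) {
+    if (er) throw er
+  })
+})
+```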
+ +* glob + + Set to `false` to disable [glob](http://npm.im/glob) pattern + matching. + + Set to an object to pass options to the glob module. The default + glob options are `{ nosort: true, silent: true }`. + + Glob version 6 is used in this module. + + Relevant for both sync and async usage. + +* disableGlob + + Set to any non-falsey value to disable globbing entirely. + (Equivalent to setting `glob: false`.) + +## rimraf.sync + +It can remove stuff synchronously, too. But that's not so good. Use +the async API. It's better. + +## CLI + +If installed with `npm install rimraf -g` it can be used as a global +command `rimraf [ ...]` which is useful for cross platform support. + +## mkdirp + +If you need to create a directory recursively, check out +[mkdirp](https://github.com/substack/node-mkdirp). diff --git a/node_modules/fsevents/node_modules/rimraf/bin.js b/node_modules/fsevents/node_modules/rimraf/bin.js new file mode 100755 index 00000000..0d1e17be --- /dev/null +++ b/node_modules/fsevents/node_modules/rimraf/bin.js @@ -0,0 +1,50 @@ +#!/usr/bin/env node + +var rimraf = require('./') + +var help = false +var dashdash = false +var noglob = false +var args = process.argv.slice(2).filter(function(arg) { + if (dashdash) + return !!arg + else if (arg === '--') + dashdash = true + else if (arg === '--no-glob' || arg === '-G') + noglob = true + else if (arg === '--glob' || arg === '-g') + noglob = false + else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/)) + help = true + else + return !!arg +}) + +if (help || args.length === 0) { + // If they didn't ask for help, then this is not a "success" + var log = help ? console.log : console.error + log('Usage: rimraf [ ...]') + log('') + log(' Deletes all files and folders at "path" recursively.') + log('') + log('Options:') + log('') + log(' -h, --help Display this usage info') + log(' -G, --no-glob Do not expand glob patterns in arguments') + log(' -g, --glob Expand glob patterns in arguments (default)') + process.exit(help ? 0 : 1) +} else + go(0) + +function go (n) { + if (n >= args.length) + return + var options = {} + if (noglob) + options = { glob: false } + rimraf(args[n], options, function (er) { + if (er) + throw er + go(n+1) + }) +} diff --git a/node_modules/fsevents/node_modules/rimraf/package.json b/node_modules/fsevents/node_modules/rimraf/package.json new file mode 100644 index 00000000..d4cc05c1 --- /dev/null +++ b/node_modules/fsevents/node_modules/rimraf/package.json @@ -0,0 +1,70 @@ +{ + "_args": [ + [ + "rimraf@2.6.3", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "rimraf@2.6.3", + "_id": "rimraf@2.6.3", + "_inBundle": false, + "_integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "_location": "/rimraf", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "rimraf@2.6.3", + "name": "rimraf", + "escapedName": "rimraf", + "rawSpec": "2.6.3", + "saveSpec": null, + "fetchSpec": "2.6.3" + }, + "_requiredBy": [ + "/node-pre-gyp" + ], + "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "_spec": "2.6.3", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bin": { + "rimraf": "./bin.js" + }, + "bugs": { + "url": "https://github.com/isaacs/rimraf/issues" + }, + "dependencies": { + "glob": "^7.1.3" + }, + "description": "A deep deletion module for node (like `rm -rf`)", + "devDependencies": { + "mkdirp": "^0.5.1", + "tap": "^12.1.1" + }, + "files": [ + "LICENSE", + "README.md", + "bin.js", + "rimraf.js" + ], + "homepage": "https://github.com/isaacs/rimraf#readme", + "license": "ISC", + "main": "rimraf.js", + "name": "rimraf", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/rimraf.git" + }, + "scripts": { + "postpublish": "git push origin --all; git push origin --tags", + "postversion": "npm publish", + "preversion": "npm test", + "test": "tap test/*.js" + }, + "version": "2.6.3" +} diff --git a/node_modules/fsevents/node_modules/rimraf/rimraf.js b/node_modules/fsevents/node_modules/rimraf/rimraf.js new file mode 100644 index 00000000..e80dd106 --- /dev/null +++ b/node_modules/fsevents/node_modules/rimraf/rimraf.js @@ -0,0 +1,364 @@ +module.exports = rimraf +rimraf.sync = rimrafSync + +var assert = require("assert") +var path = require("path") +var fs = require("fs") +var glob = require("glob") +var _0666 = parseInt('666', 8) + +var defaultGlobOpts = { + nosort: true, + silent: true +} + +// for EMFILE handling +var timeout = 0 + +var isWindows = (process.platform === "win32") + +function defaults (options) { + var methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(function(m) { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) + + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts +} + +function rimraf (p, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + defaults(options) + + var busyTries = 0 + var errState = null + var n = 0 + + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) + + options.lstat(p, function (er, stat) { + if (!er) + return afterGlob(null, [p]) + + glob(p, options.glob, afterGlob) + }) + + function next (er) { + errState = errState || er + if (--n === 0) + cb(errState) + } + + function afterGlob (er, results) { + if (er) + return cb(er) + + n = results.length + if (n === 0) + return cb() + + results.forEach(function (p) { + rimraf_(p, options, function CB (er) { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + var time = busyTries * 100 + // try again, with the same exact callback as this one. + return setTimeout(function () { + rimraf_(p, options, CB) + }, time) + } + + // this one won't happen if graceful-fs is used. 
+ if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(function () { + rimraf_(p, options, CB) + }, timeout ++) + } + + // already gone + if (er.code === "ENOENT") er = null + } + + timeout = 0 + next(er) + }) + }) + } +} + +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +function rimraf_ (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, function (er, st) { + if (er && er.code === "ENOENT") + return cb(null) + + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) + + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) + + options.unlink(p, function (er) { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) +} + +function fixWinEPERM (p, options, er, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + if (er) + assert(er instanceof Error) + + options.chmod(p, _0666, function (er2) { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, function(er3, stats) { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) +} + +function fixWinEPERMSync (p, options, er) { + assert(p) + assert(options) + if (er) + assert(er instanceof Error) + + try { + options.chmodSync(p, _0666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er + } + + try { + var stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } + + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) +} + +function rmdir (p, options, originalEr, cb) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. 
+ options.rmdir(p, function (er) { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) +} + +function rmkids(p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.readdir(p, function (er, files) { + if (er) + return cb(er) + var n = files.length + if (n === 0) + return options.rmdir(p, cb) + var errState + files.forEach(function (f) { + rimraf(path.join(p, f), options, function (er) { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +function rimrafSync (p, options) { + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + var results + + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } + } + + if (!results.length) + return + + for (var i = 0; i < results.length; i++) { + var p = results[i] + + try { + var st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } + + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + + rmdirSync(p, options, er) + } + } +} + +function rmdirSync (p, options, originalEr) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } +} + +function rmkidsSync (p, options) { + assert(p) + assert(options) + options.readdirSync(p).forEach(function (f) { + rimrafSync(path.join(p, f), options) + }) + + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + var retries = isWindows ? 
100 : 1 + var i = 0 + do { + var threw = true + try { + var ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) +} diff --git a/node_modules/fsevents/node_modules/safe-buffer/LICENSE b/node_modules/fsevents/node_modules/safe-buffer/LICENSE new file mode 100644 index 00000000..0c068cee --- /dev/null +++ b/node_modules/fsevents/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/safe-buffer/README.md b/node_modules/fsevents/node_modules/safe-buffer/README.md new file mode 100644 index 00000000..e9a81afd --- /dev/null +++ b/node_modules/fsevents/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. 
You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. 
+ +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. + +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. 
Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. + +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) 
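+
+An editor's illustrative aside (not part of the upstream safe-buffer README):
+one way to harden the `toHex` helper shown above is to reject non-string
+input before anything reaches the `Buffer` constructor. A minimal sketch (the
+`toHexSafe` name is made up for this example):
+
+```js
+var Buffer = require('safe-buffer').Buffer
+
+// Convert UTF-8 strings to hex, refusing anything that is not a string
+function toHexSafe (str) {
+  if (typeof str !== 'string') {
+    throw new TypeError('expected a string, got ' + typeof str)
+  }
+  return Buffer.from(str, 'utf8').toString('hex')
+}
+```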
+ +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. 
Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. 
+ +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/fsevents/node_modules/safe-buffer/index.d.ts b/node_modules/fsevents/node_modules/safe-buffer/index.d.ts new file mode 100644 index 00000000..e9fed809 --- /dev/null +++ b/node_modules/fsevents/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + 
readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. 
+ * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. 
+ * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/safe-buffer/index.js b/node_modules/fsevents/node_modules/safe-buffer/index.js new file mode 100644 index 00000000..22438dab --- /dev/null +++ b/node_modules/fsevents/node_modules/safe-buffer/index.js @@ -0,0 +1,62 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/fsevents/node_modules/safe-buffer/package.json b/node_modules/fsevents/node_modules/safe-buffer/package.json new file mode 100644 index 00000000..809bbd23 --- /dev/null +++ b/node_modules/fsevents/node_modules/safe-buffer/package.json @@ -0,0 +1,68 @@ +{ + "_args": [ + [ + "safe-buffer@5.1.2", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "safe-buffer@5.1.2", + "_id": "safe-buffer@5.1.2", + "_inBundle": false, + "_integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "_location": "/safe-buffer", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "safe-buffer@5.1.2", + "name": "safe-buffer", + "escapedName": "safe-buffer", + "rawSpec": "5.1.2", + "saveSpec": null, + "fetchSpec": "5.1.2" + }, + "_requiredBy": [ + "/minipass", + "/readable-stream", + "/string_decoder", + "/tar" + ], + "_resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "_spec": "5.1.2", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "description": "Safer Node.js Buffer API", + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "name": "safe-buffer", + 
"repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "types": "index.d.ts", + "version": "5.1.2" +} diff --git a/node_modules/fsevents/node_modules/safer-buffer/LICENSE b/node_modules/fsevents/node_modules/safer-buffer/LICENSE new file mode 100644 index 00000000..4fe9e6f1 --- /dev/null +++ b/node_modules/fsevents/node_modules/safer-buffer/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Nikita Skovoroda + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/fsevents/node_modules/safer-buffer/Porting-Buffer.md b/node_modules/fsevents/node_modules/safer-buffer/Porting-Buffer.md new file mode 100644 index 00000000..68d86bab --- /dev/null +++ b/node_modules/fsevents/node_modules/safer-buffer/Porting-Buffer.md @@ -0,0 +1,268 @@ +# Porting to the Buffer.from/Buffer.alloc API + + +## Overview + +- [Variant 1: Drop support for Node.js ≤ 4.4.x and 5.0.0 — 5.9.x.](#variant-1) (*recommended*) +- [Variant 2: Use a polyfill](#variant-2) +- [Variant 3: manual detection, with safeguards](#variant-3) + +### Finding problematic bits of code using grep + +Just run `grep -nrE '[^a-zA-Z](Slow)?Buffer\s*\(' --exclude-dir node_modules`. + +It will find all the potentially unsafe places in your own code (with some considerably unlikely +exceptions). + +### Finding problematic bits of code using Node.js 8 + +If you’re using Node.js ≥ 8.0.0 (which is recommended), Node.js exposes multiple options that help with finding the relevant pieces of code: + +- `--trace-warnings` will make Node.js show a stack trace for this warning and other warnings that are printed by Node.js. +- `--trace-deprecation` does the same thing, but only for deprecation warnings. +- `--pending-deprecation` will show more types of deprecation warnings. In particular, it will show the `Buffer()` deprecation warning, even on Node.js 8. + +You can set these flags using an environment variable: + +```console +$ export NODE_OPTIONS='--trace-warnings --pending-deprecation' +$ cat example.js +'use strict'; +const foo = new Buffer('foo'); +$ node example.js +(node:7147) [DEP0005] DeprecationWarning: The Buffer() and new Buffer() constructors are not recommended for use due to security and usability concerns. Please use the new Buffer.alloc(), Buffer.allocUnsafe(), or Buffer.from() construction methods instead. + at showFlaggedDeprecation (buffer.js:127:13) + at new Buffer (buffer.js:148:3) + at Object. (/path/to/example.js:2:13) + [... 
more stack trace lines ...]
+```
+
+### Finding problematic bits of code using linters
+
+ESLint rules [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor)
+or
+[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md)
+also find calls to the deprecated `Buffer()` API. Those rules are included in some presets.
+
+There is a drawback, though: they don't always
+[work correctly](https://github.com/chalker/safer-buffer#why-not-safe-buffer) when `Buffer` is
+overridden, e.g. with a polyfill, so combining this with one of the other methods
+described above is recommended.
+
+
+## Variant 1: Drop support for Node.js ≤ 4.4.x and 5.0.0 — 5.9.x.
+
+This is the recommended solution nowadays; it implies only minimal overhead.
+
+The Node.js 5.x release line has been unsupported since July 2016, and the Node.js 4.x release line reaches its End of Life in April 2018 (→ [Schedule](https://github.com/nodejs/Release#release-schedule)). This means that these versions of Node.js will *not* receive any updates, even in case of security issues, so using these release lines should be avoided, if at all possible.
+
+What you would do in this case is convert all `new Buffer()` or `Buffer()` calls to use `Buffer.alloc()` or `Buffer.from()`, in the following way:
+
+- For `new Buffer(number)`, replace it with `Buffer.alloc(number)`.
+- For `new Buffer(string)` (or `new Buffer(string, encoding)`), replace it with `Buffer.from(string)` (or `Buffer.from(string, encoding)`).
+- For all other combinations of arguments (these are much rarer), also replace `new Buffer(...arguments)` with `Buffer.from(...arguments)`.
+
+Note that `Buffer.alloc()` is also _faster_ on the current Node.js versions than
+`new Buffer(size).fill(0)`, which is what you would otherwise need to ensure zero-filling.
+
+Enabling the ESLint rule [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor)
+or
+[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md)
+is recommended to avoid accidental unsafe Buffer API usage.
+
+There is also a [JSCodeshift codemod](https://github.com/joyeecheung/node-dep-codemod#dep005)
+for automatically migrating Buffer constructors to `Buffer.alloc()` or `Buffer.from()`.
+Note that it currently only works with cases where the arguments are literals or where the
+constructor is invoked with two arguments.
+
+_If you currently support those older Node.js versions and dropping them would be a semver-major change
+for you, or if you support older branches of your packages, consider using [Variant 2](#variant-2)
+or [Variant 3](#variant-3) on older branches, so people using those older branches will also receive
+the fix. That way, you will eradicate potential issues caused by unguarded Buffer API usage and
+your users will not observe a runtime deprecation warning when running your code on Node.js 10._
+
+
+## Variant 2: Use a polyfill
+
+Utilize [safer-buffer](https://www.npmjs.com/package/safer-buffer) as a polyfill to support older
+Node.js versions.
+
+You would take exactly the same steps as in [Variant 1](#variant-1), but with a polyfill
+`const Buffer = require('safer-buffer').Buffer` in all files where you use the new `Buffer` API.
+
+Make sure that you do not use the old `new Buffer` API — in any files where the line above is added,
+using the old `new Buffer()` API will _throw_. It will be easy to notice that in CI, though.
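+
+For illustration, here is a minimal sketch (not part of the original guide) of what a small file
+could look like once it has been ported under this variant; it assumes only the `safer-buffer`
+polyfill described above, and the base64 value is an arbitrary example:
+
+```js
+// Polyfilled Buffer: any leftover `new Buffer()` / `Buffer()` call in this file will throw.
+const Buffer = require('safer-buffer').Buffer
+
+// Previously: new Buffer(str, 'base64')  ->  ported to Buffer.from()
+const decoded = Buffer.from('b25ldHdvdGhyZWU=', 'base64').toString('utf8')
+
+// Previously: new Buffer(16)  ->  ported to a zero-filled allocation
+const scratch = Buffer.alloc(16)
+
+console.log(decoded, scratch.length) // prints: onetwothree 16
+```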
+ +Alternatively, you could use [buffer-from](https://www.npmjs.com/package/buffer-from) and/or +[buffer-alloc](https://www.npmjs.com/package/buffer-alloc) [ponyfills](https://ponyfill.com/) — +those are great, the only downsides being 4 deps in the tree and slightly more code changes to +migrate off them (as you would be using e.g. `Buffer.from` under a different name). If you need only +`Buffer.from` polyfilled — `buffer-from` alone which comes with no extra dependencies. + +_Alternatively, you could use [safe-buffer](https://www.npmjs.com/package/safe-buffer) — it also +provides a polyfill, but takes a different approach which has +[it's drawbacks](https://github.com/chalker/safer-buffer#why-not-safe-buffer). It will allow you +to also use the older `new Buffer()` API in your code, though — but that's arguably a benefit, as +it is problematic, can cause issues in your code, and will start emitting runtime deprecation +warnings starting with Node.js 10._ + +Note that in either case, it is important that you also remove all calls to the old Buffer +API manually — just throwing in `safe-buffer` doesn't fix the problem by itself, it just provides +a polyfill for the new API. I have seen people doing that mistake. + +Enabling eslint rule [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor) +or +[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md) +is recommended. + +_Don't forget to drop the polyfill usage once you drop support for Node.js < 4.5.0._ + + +## Variant 3 — manual detection, with safeguards + +This is useful if you create Buffer instances in only a few places (e.g. one), or you have your own +wrapper around them. + +### Buffer(0) + +This special case for creating empty buffers can be safely replaced with `Buffer.concat([])`, which +returns the same result all the way down to Node.js 0.8.x. + +### Buffer(notNumber) + +Before: + +```js +var buf = new Buffer(notNumber, encoding); +``` + +After: + +```js +var buf; +if (Buffer.from && Buffer.from !== Uint8Array.from) { + buf = Buffer.from(notNumber, encoding); +} else { + if (typeof notNumber === 'number') + throw new Error('The "size" argument must be of type number.'); + buf = new Buffer(notNumber, encoding); +} +``` + +`encoding` is optional. + +Note that the `typeof notNumber` before `new Buffer` is required (for cases when `notNumber` argument is not +hard-coded) and _is not caused by the deprecation of Buffer constructor_ — it's exactly _why_ the +Buffer constructor is deprecated. Ecosystem packages lacking this type-check caused numereous +security issues — situations when unsanitized user input could end up in the `Buffer(arg)` create +problems ranging from DoS to leaking sensitive information to the attacker from the process memory. + +When `notNumber` argument is hardcoded (e.g. literal `"abc"` or `[0,1,2]`), the `typeof` check can +be omitted. + +Also note that using TypeScript does not fix this problem for you — when libs written in +`TypeScript` are used from JS, or when user input ends up there — it behaves exactly as pure JS, as +all type checks are translation-time only and are not present in the actual JS code which TS +compiles to. + +### Buffer(number) + +For Node.js 0.10.x (and below) support: + +```js +var buf; +if (Buffer.alloc) { + buf = Buffer.alloc(number); +} else { + buf = new Buffer(number); + buf.fill(0); +} +``` + +Otherwise (Node.js ≥ 0.12.x): + +```js +const buf = Buffer.alloc ? 
Buffer.alloc(number) : new Buffer(number).fill(0); +``` + +## Regarding Buffer.allocUnsafe + +Be extra cautious when using `Buffer.allocUnsafe`: + * Don't use it if you don't have a good reason to + * e.g. you probably won't ever see a performance difference for small buffers, in fact, those + might be even faster with `Buffer.alloc()`, + * if your code is not in the hot code path — you also probably won't notice a difference, + * keep in mind that zero-filling minimizes the potential risks. + * If you use it, make sure that you never return the buffer in a partially-filled state, + * if you are writing to it sequentially — always truncate it to the actuall written length + +Errors in handling buffers allocated with `Buffer.allocUnsafe` could result in various issues, +ranged from undefined behaviour of your code to sensitive data (user input, passwords, certs) +leaking to the remote attacker. + +_Note that the same applies to `new Buffer` usage without zero-filling, depending on the Node.js +version (and lacking type checks also adds DoS to the list of potential problems)._ + + +## FAQ + + +### What is wrong with the `Buffer` constructor? + +The `Buffer` constructor could be used to create a buffer in many different ways: + +- `new Buffer(42)` creates a `Buffer` of 42 bytes. Before Node.js 8, this buffer contained + *arbitrary memory* for performance reasons, which could include anything ranging from + program source code to passwords and encryption keys. +- `new Buffer('abc')` creates a `Buffer` that contains the UTF-8-encoded version of + the string `'abc'`. A second argument could specify another encoding: For example, + `new Buffer(string, 'base64')` could be used to convert a Base64 string into the original + sequence of bytes that it represents. +- There are several other combinations of arguments. + +This meant that, in code like `var buffer = new Buffer(foo);`, *it is not possible to tell +what exactly the contents of the generated buffer are* without knowing the type of `foo`. + +Sometimes, the value of `foo` comes from an external source. For example, this function +could be exposed as a service on a web server, converting a UTF-8 string into its Base64 form: + +``` +function stringToBase64(req, res) { + // The request body should have the format of `{ string: 'foobar' }` + const rawBytes = new Buffer(req.body.string) + const encoded = rawBytes.toString('base64') + res.end({ encoded: encoded }) +} +``` + +Note that this code does *not* validate the type of `req.body.string`: + +- `req.body.string` is expected to be a string. If this is the case, all goes well. +- `req.body.string` is controlled by the client that sends the request. +- If `req.body.string` is the *number* `50`, the `rawBytes` would be 50 bytes: + - Before Node.js 8, the content would be uninitialized + - After Node.js 8, the content would be `50` bytes with the value `0` + +Because of the missing type check, an attacker could intentionally send a number +as part of the request. Using this, they can either: + +- Read uninitialized memory. This **will** leak passwords, encryption keys and other + kinds of sensitive information. (Information leak) +- Force the program to allocate a large amount of memory. For example, when specifying + `500000000` as the input value, each request will allocate 500MB of memory. + This can be used to either exhaust the memory available of a program completely + and make it crash, or slow it down significantly. 
(Denial of Service) + +Both of these scenarios are considered serious security issues in a real-world +web server context. + +when using `Buffer.from(req.body.string)` instead, passing a number will always +throw an exception instead, giving a controlled behaviour that can always be +handled by the program. + + +### The `Buffer()` constructor has been deprecated for a while. Is this really an issue? + +Surveys of code in the `npm` ecosystem have shown that the `Buffer()` constructor is still +widely used. This includes new code, and overall usage of such code has actually been +*increasing*. diff --git a/node_modules/fsevents/node_modules/safer-buffer/Readme.md b/node_modules/fsevents/node_modules/safer-buffer/Readme.md new file mode 100644 index 00000000..14b08229 --- /dev/null +++ b/node_modules/fsevents/node_modules/safer-buffer/Readme.md @@ -0,0 +1,156 @@ +# safer-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![javascript style guide][standard-image]][standard-url] [![Security Responsible Disclosure][secuirty-image]][secuirty-url] + +[travis-image]: https://travis-ci.org/ChALkeR/safer-buffer.svg?branch=master +[travis-url]: https://travis-ci.org/ChALkeR/safer-buffer +[npm-image]: https://img.shields.io/npm/v/safer-buffer.svg +[npm-url]: https://npmjs.org/package/safer-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com +[secuirty-image]: https://img.shields.io/badge/Security-Responsible%20Disclosure-green.svg +[secuirty-url]: https://github.com/nodejs/security-wg/blob/master/processes/responsible_disclosure_template.md + +Modern Buffer API polyfill without footguns, working on Node.js from 0.8 to current. + +## How to use? + +First, port all `Buffer()` and `new Buffer()` calls to `Buffer.alloc()` and `Buffer.from()` API. + +Then, to achieve compatibility with outdated Node.js versions (`<4.5.0` and 5.x `<5.9.0`), use +`const Buffer = require('safer-buffer').Buffer` in all files where you make calls to the new +Buffer API. _Use `var` instead of `const` if you need that for your Node.js version range support._ + +Also, see the +[porting Buffer](https://github.com/ChALkeR/safer-buffer/blob/master/Porting-Buffer.md) guide. + +## Do I need it? + +Hopefully, not — dropping support for outdated Node.js versions should be fine nowdays, and that +is the recommended path forward. You _do_ need to port to the `Buffer.alloc()` and `Buffer.from()` +though. + +See the [porting guide](https://github.com/ChALkeR/safer-buffer/blob/master/Porting-Buffer.md) +for a better description. + +## Why not [safe-buffer](https://npmjs.com/safe-buffer)? + +_In short: while `safe-buffer` serves as a polyfill for the new API, it allows old API usage and +itself contains footguns._ + +`safe-buffer` could be used safely to get the new API while still keeping support for older +Node.js versions (like this module), but while analyzing ecosystem usage of the old Buffer API +I found out that `safe-buffer` is itself causing problems in some cases. + +For example, consider the following snippet: + +```console +$ cat example.unsafe.js +console.log(Buffer(20)) +$ ./node-v6.13.0-linux-x64/bin/node example.unsafe.js + +$ standard example.unsafe.js +standard: Use JavaScript Standard Style (https://standardjs.com) + /home/chalker/repo/safer-buffer/example.unsafe.js:2:13: 'Buffer()' was deprecated since v6. Use 'Buffer.alloc()' or 'Buffer.from()' (use 'https://www.npmjs.com/package/safe-buffer' for '<4.5.0') instead. 
+```
+
+This allocates an uninitialized chunk of memory and writes it to the console.
+The [standard](https://www.npmjs.com/package/standard) linter (among others) catches that and warns people
+to avoid using the unsafe API.
+
+Let's now throw in `safe-buffer`!
+
+```console
+$ cat example.safe-buffer.js
+const Buffer = require('safe-buffer').Buffer
+console.log(Buffer(20))
+$ standard example.safe-buffer.js
+$ ./node-v6.13.0-linux-x64/bin/node example.safe-buffer.js
+
+```
+
+See the problem? Adding in `safe-buffer` _magically removes the lint warning_, but the behavior
+remains identical to what we had before, and when launched on Node.js 6.x LTS — this dumps out
+chunks of uninitialized memory.
+_And this code will still emit runtime warnings on Node.js 10.x and above._
+
+That was done by design. I first considered changing `safe-buffer`, prohibiting old API usage or
+emitting warnings on it, but that significantly diverges from `safe-buffer` design. After some
+discussion, it was decided to move my approach into a separate package, and _this is that separate
+package_.
+
+This footgun is not imaginary — I observed top-downloaded packages doing that kind of thing,
+«fixing» the lint warning by blindly including `safe-buffer` without any actual changes.
+
+Also, in some cases, even if the API _was_ migrated to the safe Buffer API — a random pull request
+can bring unsafe Buffer API usage back to the codebase by adding new calls — and that could go
+unnoticed even if you have a linter prohibiting that (because of the reason stated above), and even
+pass CI. _I also observed that being done in popular packages._
+
+Some examples:
+ * [webdriverio](https://github.com/webdriverio/webdriverio/commit/05cbd3167c12e4930f09ef7cf93b127ba4effae4#diff-124380949022817b90b622871837d56cR31)
+   (a module with 548 759 downloads/month),
+ * [websocket-stream](https://github.com/maxogden/websocket-stream/commit/c9312bd24d08271687d76da0fe3c83493871cf61)
+   (218 288 d/m, fix in [maxogden/websocket-stream#142](https://github.com/maxogden/websocket-stream/pull/142)),
+ * [node-serialport](https://github.com/node-serialport/node-serialport/commit/e8d9d2b16c664224920ce1c895199b1ce2def48c)
+   (113 138 d/m, fix in [node-serialport/node-serialport#1510](https://github.com/node-serialport/node-serialport/pull/1510)),
+ * [karma](https://github.com/karma-runner/karma/commit/3d94b8cf18c695104ca195334dc75ff054c74eec)
+   (3 973 193 d/m, fix in [karma-runner/karma#2947](https://github.com/karma-runner/karma/pull/2947)),
+ * [spdy-transport](https://github.com/spdy-http2/spdy-transport/commit/5375ac33f4a62a4f65bcfc2827447d42a5dbe8b1)
+   (5 970 727 d/m, fix in [spdy-http2/spdy-transport#53](https://github.com/spdy-http2/spdy-transport/pull/53)).
+ * And there are a lot more across the ecosystem.
+
+I filed a PR at
+[mysticatea/eslint-plugin-node#110](https://github.com/mysticatea/eslint-plugin-node/pull/110) to
+partially fix that (for cases when that lint rule is used), but it is a semver-major change for
+linter rules and presets, so it would take significant time for that to reach actual setups.
+_It also hasn't been released yet (2018-03-20)._
+
+Also, `safer-buffer` discourages the usage of `.allocUnsafe()`, which is often done by mistake.
+It still supports it behind an explicit concern barrier, by placing it under
+`require('safer-buffer/dangerous')`.
+
+## But isn't throwing bad?
+
+Not really.
It's an error that could be noticed and fixed early, instead of causing havoc later like +unguarded `new Buffer()` calls that end up receiving user input can do. + +This package affects only the files where `var Buffer = require('safer-buffer').Buffer` was done, so +it is really simple to keep track of things and make sure that you don't mix old API usage with that. +Also, CI should hint anything that you might have missed. + +New commits, if tested, won't land new usage of unsafe Buffer API this way. +_Node.js 10.x also deals with that by printing a runtime depecation warning._ + +### Would it affect third-party modules? + +No, unless you explicitly do an awful thing like monkey-patching or overriding the built-in `Buffer`. +Don't do that. + +### But I don't want throwing… + +That is also fine! + +Also, it could be better in some cases when you don't comprehensive enough test coverage. + +In that case — just don't override `Buffer` and use +`var SaferBuffer = require('safer-buffer').Buffer` instead. + +That way, everything using `Buffer` natively would still work, but there would be two drawbacks: + +* `Buffer.from`/`Buffer.alloc` won't be polyfilled — use `SaferBuffer.from` and + `SaferBuffer.alloc` instead. +* You are still open to accidentally using the insecure deprecated API — use a linter to catch that. + +Note that using a linter to catch accidential `Buffer` constructor usage in this case is strongly +recommended. `Buffer` is not overriden in this usecase, so linters won't get confused. + +## «Without footguns»? + +Well, it is still possible to do _some_ things with `Buffer` API, e.g. accessing `.buffer` property +on older versions and duping things from there. You shouldn't do that in your code, probabably. + +The intention is to remove the most significant footguns that affect lots of packages in the +ecosystem, and to do it in the proper way. + +Also, this package doesn't protect against security issues affecting some Node.js versions, so for +usage in your own production code, it is still recommended to update to a Node.js version +[supported by upstream](https://github.com/nodejs/release#release-schedule). diff --git a/node_modules/fsevents/node_modules/safer-buffer/dangerous.js b/node_modules/fsevents/node_modules/safer-buffer/dangerous.js new file mode 100644 index 00000000..ca41fdc5 --- /dev/null +++ b/node_modules/fsevents/node_modules/safer-buffer/dangerous.js @@ -0,0 +1,58 @@ +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +var buffer = require('buffer') +var Buffer = buffer.Buffer +var safer = require('./safer.js') +var Safer = safer.Buffer + +var dangerous = {} + +var key + +for (key in safer) { + if (!safer.hasOwnProperty(key)) continue + dangerous[key] = safer[key] +} + +var Dangereous = dangerous.Buffer = {} + +// Copy Safer API +for (key in Safer) { + if (!Safer.hasOwnProperty(key)) continue + Dangereous[key] = Safer[key] +} + +// Copy those missing unsafe methods, if they are present +for (key in Buffer) { + if (!Buffer.hasOwnProperty(key)) continue + if (Dangereous.hasOwnProperty(key)) continue + Dangereous[key] = Buffer[key] +} + +if (!Dangereous.allocUnsafe) { + Dangereous.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. 
Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + return Buffer(size) + } +} + +if (!Dangereous.allocUnsafeSlow) { + Dangereous.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + return buffer.SlowBuffer(size) + } +} + +module.exports = dangerous diff --git a/node_modules/fsevents/node_modules/safer-buffer/package.json b/node_modules/fsevents/node_modules/safer-buffer/package.json new file mode 100644 index 00000000..7b570524 --- /dev/null +++ b/node_modules/fsevents/node_modules/safer-buffer/package.json @@ -0,0 +1,63 @@ +{ + "_args": [ + [ + "safer-buffer@2.1.2", + "/Users/pipobscure/fsevents" + ] + ], + "_from": "safer-buffer@2.1.2", + "_id": "safer-buffer@2.1.2", + "_inBundle": false, + "_integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "_location": "/safer-buffer", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "safer-buffer@2.1.2", + "name": "safer-buffer", + "escapedName": "safer-buffer", + "rawSpec": "2.1.2", + "saveSpec": null, + "fetchSpec": "2.1.2" + }, + "_requiredBy": [ + "/iconv-lite" + ], + "_resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "_spec": "2.1.2", + "_where": "/Users/pipobscure/fsevents", + "author": { + "name": "Nikita Skovoroda", + "email": "chalkerx@gmail.com", + "url": "https://github.com/ChALkeR" + }, + "bugs": { + "url": "https://github.com/ChALkeR/safer-buffer/issues" + }, + "description": "Modern Buffer API polyfill without footguns", + "devDependencies": { + "standard": "^11.0.1", + "tape": "^4.9.0" + }, + "files": [ + "Porting-Buffer.md", + "Readme.md", + "tests.js", + "dangerous.js", + "safer.js" + ], + "homepage": "https://github.com/ChALkeR/safer-buffer#readme", + "license": "MIT", + "main": "safer.js", + "name": "safer-buffer", + "repository": { + "type": "git", + "url": "git+https://github.com/ChALkeR/safer-buffer.git" + }, + "scripts": { + "browserify-test": "browserify --external tape tests.js > browserify-tests.js && tape browserify-tests.js", + "test": "standard && tape tests.js" + }, + "version": "2.1.2" +} diff --git a/node_modules/fsevents/node_modules/safer-buffer/safer.js b/node_modules/fsevents/node_modules/safer-buffer/safer.js new file mode 100644 index 00000000..37c7e1aa --- /dev/null +++ b/node_modules/fsevents/node_modules/safer-buffer/safer.js @@ -0,0 +1,77 @@ +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +var buffer = require('buffer') +var Buffer = buffer.Buffer + +var safer = {} + +var key + +for (key in buffer) { + if (!buffer.hasOwnProperty(key)) continue + if (key === 'SlowBuffer' || key === 'Buffer') continue + safer[key] = buffer[key] +} + +var Safer = safer.Buffer = {} +for (key in Buffer) { + if (!Buffer.hasOwnProperty(key)) continue + if (key === 'allocUnsafe' || key === 'allocUnsafeSlow') continue + Safer[key] = Buffer[key] +} + +safer.Buffer.prototype = Buffer.prototype + +if (!Safer.from || Safer.from === Uint8Array.from) { + Safer.from = function (value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('The "value" argument must not be of type number. 
Received type ' + typeof value) + } + if (value && typeof value.length === 'undefined') { + throw new TypeError('The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type ' + typeof value) + } + return Buffer(value, encodingOrOffset, length) + } +} + +if (!Safer.alloc) { + Safer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + var buf = Buffer(size) + if (!fill || fill.length === 0) { + buf.fill(0) + } else if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + return buf + } +} + +if (!safer.kStringMaxLength) { + try { + safer.kStringMaxLength = process.binding('buffer').kStringMaxLength + } catch (e) { + // we can't determine kStringMaxLength in environments where process.binding + // is unsupported, so let's not set it + } +} + +if (!safer.constants) { + safer.constants = { + MAX_LENGTH: safer.kMaxLength + } + if (safer.kStringMaxLength) { + safer.constants.MAX_STRING_LENGTH = safer.kStringMaxLength + } +} + +module.exports = safer diff --git a/node_modules/fsevents/node_modules/safer-buffer/tests.js b/node_modules/fsevents/node_modules/safer-buffer/tests.js new file mode 100644 index 00000000..7ed2777c --- /dev/null +++ b/node_modules/fsevents/node_modules/safer-buffer/tests.js @@ -0,0 +1,406 @@ +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +var test = require('tape') + +var buffer = require('buffer') + +var index = require('./') +var safer = require('./safer') +var dangerous = require('./dangerous') + +/* Inheritance tests */ + +test('Default is Safer', function (t) { + t.equal(index, safer) + t.notEqual(safer, dangerous) + t.notEqual(index, dangerous) + t.end() +}) + +test('Is not a function', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(typeof impl, 'object') + t.equal(typeof impl.Buffer, 'object') + }); + [buffer].forEach(function (impl) { + t.equal(typeof impl, 'object') + t.equal(typeof impl.Buffer, 'function') + }) + t.end() +}) + +test('Constructor throws', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.throws(function () { impl.Buffer() }) + t.throws(function () { impl.Buffer(0) }) + t.throws(function () { impl.Buffer('a') }) + t.throws(function () { impl.Buffer('a', 'utf-8') }) + t.throws(function () { return new impl.Buffer() }) + t.throws(function () { return new impl.Buffer(0) }) + t.throws(function () { return new impl.Buffer('a') }) + t.throws(function () { return new impl.Buffer('a', 'utf-8') }) + }) + t.end() +}) + +test('Safe methods exist', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(typeof impl.Buffer.alloc, 'function', 'alloc') + t.equal(typeof impl.Buffer.from, 'function', 'from') + }) + t.end() +}) + +test('Unsafe methods exist only in Dangerous', function (t) { + [index, safer].forEach(function (impl) { + t.equal(typeof impl.Buffer.allocUnsafe, 'undefined') + t.equal(typeof impl.Buffer.allocUnsafeSlow, 'undefined') + }); + [dangerous].forEach(function (impl) { + t.equal(typeof impl.Buffer.allocUnsafe, 'function') + t.equal(typeof impl.Buffer.allocUnsafeSlow, 'function') + }) + t.end() +}) + +test('Generic methods/properties are defined and equal', function (t) { + ['poolSize', 'isBuffer', 'concat', 
'byteLength'].forEach(function (method) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], buffer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Built-in buffer static methods/properties are inherited', function (t) { + Object.keys(buffer).forEach(function (method) { + if (method === 'SlowBuffer' || method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], buffer[method], method) + t.notEqual(typeof impl[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Built-in Buffer static methods/properties are inherited', function (t) { + Object.keys(buffer.Buffer).forEach(function (method) { + if (method === 'allocUnsafe' || method === 'allocUnsafeSlow') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], buffer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('.prototype property of Buffer is inherited', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.prototype, buffer.Buffer.prototype, 'prototype') + t.notEqual(typeof impl.Buffer.prototype, 'undefined', 'prototype') + }) + t.end() +}) + +test('All Safer methods are present in Dangerous', function (t) { + Object.keys(safer).forEach(function (method) { + if (method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], safer[method], method) + if (method !== 'kStringMaxLength') { + t.notEqual(typeof impl[method], 'undefined', method) + } + }) + }) + Object.keys(safer.Buffer).forEach(function (method) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], safer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Safe methods from Dangerous methods are present in Safer', function (t) { + Object.keys(dangerous).forEach(function (method) { + if (method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], dangerous[method], method) + if (method !== 'kStringMaxLength') { + t.notEqual(typeof impl[method], 'undefined', method) + } + }) + }) + Object.keys(dangerous.Buffer).forEach(function (method) { + if (method === 'allocUnsafe' || method === 'allocUnsafeSlow') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], dangerous.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +/* Behaviour tests */ + +test('Methods return Buffers', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0, 10))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0, 'a'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(10))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(10, 'x'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(9, 'ab'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from(''))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('string'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('string', 'utf-8'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from([0, 42, 3]))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from(new Uint8Array([0, 42, 3])))) + 
t.ok(buffer.Buffer.isBuffer(impl.Buffer.from([]))) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.ok(buffer.Buffer.isBuffer(dangerous.Buffer[method](0))) + t.ok(buffer.Buffer.isBuffer(dangerous.Buffer[method](10))) + }) + t.end() +}) + +test('Constructor is buffer.Buffer', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.alloc(0).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(0, 10).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(0, 'a').constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(10).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(10, 'x').constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(9, 'ab').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('string').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('string', 'utf-8').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64').constructor, buffer.Buffer) + t.equal(impl.Buffer.from([0, 42, 3]).constructor, buffer.Buffer) + t.equal(impl.Buffer.from(new Uint8Array([0, 42, 3])).constructor, buffer.Buffer) + t.equal(impl.Buffer.from([]).constructor, buffer.Buffer) + }); + [0, 10, 100].forEach(function (arg) { + t.equal(dangerous.Buffer.allocUnsafe(arg).constructor, buffer.Buffer) + t.equal(dangerous.Buffer.allocUnsafeSlow(arg).constructor, buffer.SlowBuffer(0).constructor) + }) + t.end() +}) + +test('Invalid calls throw', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.throws(function () { impl.Buffer.from(0) }) + t.throws(function () { impl.Buffer.from(10) }) + t.throws(function () { impl.Buffer.from(10, 'utf-8') }) + t.throws(function () { impl.Buffer.from('string', 'invalid encoding') }) + t.throws(function () { impl.Buffer.from(-10) }) + t.throws(function () { impl.Buffer.from(1e90) }) + t.throws(function () { impl.Buffer.from(Infinity) }) + t.throws(function () { impl.Buffer.from(-Infinity) }) + t.throws(function () { impl.Buffer.from(NaN) }) + t.throws(function () { impl.Buffer.from(null) }) + t.throws(function () { impl.Buffer.from(undefined) }) + t.throws(function () { impl.Buffer.from() }) + t.throws(function () { impl.Buffer.from({}) }) + t.throws(function () { impl.Buffer.alloc('') }) + t.throws(function () { impl.Buffer.alloc('string') }) + t.throws(function () { impl.Buffer.alloc('string', 'utf-8') }) + t.throws(function () { impl.Buffer.alloc('b25ldHdvdGhyZWU=', 'base64') }) + t.throws(function () { impl.Buffer.alloc(-10) }) + t.throws(function () { impl.Buffer.alloc(1e90) }) + t.throws(function () { impl.Buffer.alloc(2 * (1 << 30)) }) + t.throws(function () { impl.Buffer.alloc(Infinity) }) + t.throws(function () { impl.Buffer.alloc(-Infinity) }) + t.throws(function () { impl.Buffer.alloc(null) }) + t.throws(function () { impl.Buffer.alloc(undefined) }) + t.throws(function () { impl.Buffer.alloc() }) + t.throws(function () { impl.Buffer.alloc([]) }) + t.throws(function () { impl.Buffer.alloc([0, 42, 3]) }) + t.throws(function () { impl.Buffer.alloc({}) }) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.throws(function () { dangerous.Buffer[method]('') }) + t.throws(function () { dangerous.Buffer[method]('string') }) + t.throws(function () { dangerous.Buffer[method]('string', 'utf-8') }) + t.throws(function () { dangerous.Buffer[method](2 * (1 << 30)) }) + t.throws(function () { dangerous.Buffer[method](Infinity) }) + if (dangerous.Buffer[method] === 
buffer.Buffer.allocUnsafe) { + t.skip('Skipping, older impl of allocUnsafe coerced negative sizes to 0') + } else { + t.throws(function () { dangerous.Buffer[method](-10) }) + t.throws(function () { dangerous.Buffer[method](-1e90) }) + t.throws(function () { dangerous.Buffer[method](-Infinity) }) + } + t.throws(function () { dangerous.Buffer[method](null) }) + t.throws(function () { dangerous.Buffer[method](undefined) }) + t.throws(function () { dangerous.Buffer[method]() }) + t.throws(function () { dangerous.Buffer[method]([]) }) + t.throws(function () { dangerous.Buffer[method]([0, 42, 3]) }) + t.throws(function () { dangerous.Buffer[method]({}) }) + }) + t.end() +}) + +test('Buffers have appropriate lengths', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.alloc(0).length, 0) + t.equal(impl.Buffer.alloc(10).length, 10) + t.equal(impl.Buffer.from('').length, 0) + t.equal(impl.Buffer.from('string').length, 6) + t.equal(impl.Buffer.from('string', 'utf-8').length, 6) + t.equal(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64').length, 11) + t.equal(impl.Buffer.from([0, 42, 3]).length, 3) + t.equal(impl.Buffer.from(new Uint8Array([0, 42, 3])).length, 3) + t.equal(impl.Buffer.from([]).length, 0) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.equal(dangerous.Buffer[method](0).length, 0) + t.equal(dangerous.Buffer[method](10).length, 10) + }) + t.end() +}) + +test('Buffers have appropriate lengths (2)', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true; + [ safer.Buffer.alloc, + dangerous.Buffer.allocUnsafe, + dangerous.Buffer.allocUnsafeSlow + ].forEach(function (method) { + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 1e5) + var buf = method(length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + } + }) + t.ok(ok) + t.end() +}) + +test('.alloc(size) is zero-filled and has correct length', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var buf = index.Buffer.alloc(length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + var j + for (j = 0; j < length; j++) { + if (buf[j] !== 0) ok = false + } + buf.fill(1) + for (j = 0; j < length; j++) { + if (buf[j] !== 1) ok = false + } + } + t.ok(ok) + t.end() +}) + +test('.allocUnsafe / .allocUnsafeSlow are fillable and have correct lengths', function (t) { + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var buf = dangerous.Buffer[method](length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + buf.fill(0, 0, length) + var j + for (j = 0; j < length; j++) { + if (buf[j] !== 0) ok = false + } + buf.fill(1, 0, length) + for (j = 0; j < length; j++) { + if (buf[j] !== 1) ok = false + } + } + t.ok(ok, method) + }) + t.end() +}) + +test('.alloc(size, fill) is `fill`-filled', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var fill = Math.round(Math.random() * 255) + var buf = index.Buffer.alloc(length, fill) + if 
(!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + for (var j = 0; j < length; j++) { + if (buf[j] !== fill) ok = false + } + } + t.ok(ok) + t.end() +}) + +test('.alloc(size, fill) is `fill`-filled', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var fill = Math.round(Math.random() * 255) + var buf = index.Buffer.alloc(length, fill) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + for (var j = 0; j < length; j++) { + if (buf[j] !== fill) ok = false + } + } + t.ok(ok) + t.deepEqual(index.Buffer.alloc(9, 'a'), index.Buffer.alloc(9, 97)) + t.notDeepEqual(index.Buffer.alloc(9, 'a'), index.Buffer.alloc(9, 98)) + + var tmp = new buffer.Buffer(2) + tmp.fill('ok') + if (tmp[1] === tmp[0]) { + // Outdated Node.js + t.deepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('ooooo')) + } else { + t.deepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('okoko')) + } + t.notDeepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('kokok')) + + t.end() +}) + +test('safer.Buffer.from returns results same as Buffer constructor', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.deepEqual(impl.Buffer.from(''), new buffer.Buffer('')) + t.deepEqual(impl.Buffer.from('string'), new buffer.Buffer('string')) + t.deepEqual(impl.Buffer.from('string', 'utf-8'), new buffer.Buffer('string', 'utf-8')) + t.deepEqual(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'), new buffer.Buffer('b25ldHdvdGhyZWU=', 'base64')) + t.deepEqual(impl.Buffer.from([0, 42, 3]), new buffer.Buffer([0, 42, 3])) + t.deepEqual(impl.Buffer.from(new Uint8Array([0, 42, 3])), new buffer.Buffer(new Uint8Array([0, 42, 3]))) + t.deepEqual(impl.Buffer.from([]), new buffer.Buffer([])) + }) + t.end() +}) + +test('safer.Buffer.from returns consistent results', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.deepEqual(impl.Buffer.from(''), impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from([]), impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from(new Uint8Array([])), impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from('string', 'utf-8'), impl.Buffer.from('string')) + t.deepEqual(impl.Buffer.from('string'), impl.Buffer.from([115, 116, 114, 105, 110, 103])) + t.deepEqual(impl.Buffer.from('string'), impl.Buffer.from(impl.Buffer.from('string'))) + t.deepEqual(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'), impl.Buffer.from('onetwothree')) + t.notDeepEqual(impl.Buffer.from('b25ldHdvdGhyZWU='), impl.Buffer.from('onetwothree')) + }) + t.end() +}) diff --git a/node_modules/fsevents/node_modules/sax/LICENSE b/node_modules/fsevents/node_modules/sax/LICENSE new file mode 100644 index 00000000..ccffa082 --- /dev/null +++ b/node_modules/fsevents/node_modules/sax/LICENSE @@ -0,0 +1,41 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +==== + +`String.fromCodePoint` by Mathias Bynens used according to terms of MIT +License, as follows: + + Copyright Mathias Bynens + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/sax/README.md b/node_modules/fsevents/node_modules/sax/README.md new file mode 100644 index 00000000..afcd3f3d --- /dev/null +++ b/node_modules/fsevents/node_modules/sax/README.md @@ -0,0 +1,225 @@ +# sax js + +A sax-style parser for XML and HTML. + +Designed with [node](http://nodejs.org/) in mind, but should work fine in +the browser or other CommonJS implementations. + +## What This Is + +* A very simple tool to parse through an XML string. +* A stepping stone to a streaming HTML parser. +* A handy way to deal with RSS and other mostly-ok-but-kinda-broken XML + docs. + +## What This Is (probably) Not + +* An HTML Parser - That's a fine goal, but this isn't it. It's just + XML. +* A DOM Builder - You can use it to build an object model out of XML, + but it doesn't do that out of the box. +* XSLT - No DOM = no querying. +* 100% Compliant with (some other SAX implementation) - Most SAX + implementations are in Java and do a lot more than this does. +* An XML Validator - It does a little validation when in strict mode, but + not much. +* A Schema-Aware XSD Thing - Schemas are an exercise in fetishistic + masochism. +* A DTD-aware Thing - Fetching DTDs is a much bigger job. + +## Regarding `Hello, world!').close(); + +// stream usage +// takes the same options as the parser +var saxStream = require("sax").createStream(strict, options) +saxStream.on("error", function (e) { + // unhandled errors will throw, since this is a proper node + // event emitter. + console.error("error!", e) + // clear the error + this._parser.error = null + this._parser.resume() +}) +saxStream.on("opentag", function (node) { + // same object as above +}) +// pipe is supported, and it's readable/writable +// same chunks coming in also go out. +fs.createReadStream("file.xml") + .pipe(saxStream) + .pipe(fs.createWriteStream("file-copy.xml")) +``` + + +## Arguments + +Pass the following arguments to the parser function. All are optional. + +`strict` - Boolean. Whether or not to be a jerk. 
Default: `false`. + +`opt` - Object bag of settings regarding string formatting. All default to `false`. + +Settings supported: + +* `trim` - Boolean. Whether or not to trim text and comment nodes. +* `normalize` - Boolean. If true, then turn any whitespace into a single + space. +* `lowercase` - Boolean. If true, then lowercase tag names and attribute names + in loose mode, rather than uppercasing them. +* `xmlns` - Boolean. If true, then namespaces are supported. +* `position` - Boolean. If false, then don't track line/col/position. +* `strictEntities` - Boolean. If true, only parse [predefined XML + entities](http://www.w3.org/TR/REC-xml/#sec-predefined-ent) + (`&`, `'`, `>`, `<`, and `"`) + +## Methods + +`write` - Write bytes onto the stream. You don't have to do this all at +once. You can keep writing as much as you want. + +`close` - Close the stream. Once closed, no more data may be written until +it is done processing the buffer, which is signaled by the `end` event. + +`resume` - To gracefully handle errors, assign a listener to the `error` +event. Then, when the error is taken care of, you can call `resume` to +continue parsing. Otherwise, the parser will not continue while in an error +state. + +## Members + +At all times, the parser object will have the following members: + +`line`, `column`, `position` - Indications of the position in the XML +document where the parser currently is looking. + +`startTagPosition` - Indicates the position where the current tag starts. + +`closed` - Boolean indicating whether or not the parser can be written to. +If it's `true`, then wait for the `ready` event to write again. + +`strict` - Boolean indicating whether or not the parser is a jerk. + +`opt` - Any options passed into the constructor. + +`tag` - The current tag being dealt with. + +And a bunch of other stuff that you probably shouldn't touch. + +## Events + +All events emit with a single argument. To listen to an event, assign a +function to `on`. Functions get executed in the this-context of +the parser object. The list of supported events are also in the exported +`EVENTS` array. + +When using the stream interface, assign handlers using the EventEmitter +`on` function in the normal fashion. + +`error` - Indication that something bad happened. The error will be hanging +out on `parser.error`, and must be deleted before parsing can continue. By +listening to this event, you can keep an eye on that kind of stuff. Note: +this happens *much* more in strict mode. Argument: instance of `Error`. + +`text` - Text node. Argument: string of text. + +`doctype` - The ``. Argument: +object with `name` and `body` members. Attributes are not parsed, as +processing instructions have implementation dependent semantics. + +`sgmldeclaration` - Random SGML declarations. Stuff like `` +would trigger this kind of event. This is a weird thing to support, so it +might go away at some point. SAX isn't intended to be used to parse SGML, +after all. + +`opentagstart` - Emitted immediately when the tag name is available, +but before any attributes are encountered. Argument: object with a +`name` field and an empty `attributes` set. Note that this is the +same object that will later be emitted in the `opentag` event. + +`opentag` - An opening tag. Argument: object with `name` and `attributes`. +In non-strict mode, tag names are uppercased, unless the `lowercase` +option is set. 
If the `xmlns` option is set, then it will contain +namespace binding information on the `ns` member, and will have a +`local`, `prefix`, and `uri` member. + +`closetag` - A closing tag. In loose mode, tags are auto-closed if their +parent closes. In strict mode, well-formedness is enforced. Note that +self-closing tags will have `closeTag` emitted immediately after `openTag`. +Argument: tag name. + +`attribute` - An attribute node. Argument: object with `name` and `value`. +In non-strict mode, attribute names are uppercased, unless the `lowercase` +option is set. If the `xmlns` option is set, it will also contains namespace +information. + +`comment` - A comment node. Argument: the string of the comment. + +`opencdata` - The opening tag of a ``) of a `` tags trigger a `"script"` +event, and their contents are not checked for special xml characters. +If you pass `noscript: true`, then this behavior is suppressed. + +## Reporting Problems + +It's best to write a failing test if you find an issue. I will always +accept pull requests with failing tests if they demonstrate intended +behavior, but it is very hard to figure out what issue you're describing +without a test. Writing a test is also the best way for you yourself +to figure out if you really understand the issue you think you have with +sax-js. diff --git a/node_modules/fsevents/node_modules/sax/lib/sax.js b/node_modules/fsevents/node_modules/sax/lib/sax.js new file mode 100644 index 00000000..795d607e --- /dev/null +++ b/node_modules/fsevents/node_modules/sax/lib/sax.js @@ -0,0 +1,1565 @@ +;(function (sax) { // wrapper for non-node envs + sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } + sax.SAXParser = SAXParser + sax.SAXStream = SAXStream + sax.createStream = createStream + + // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. + // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. + // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. + sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 
'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. + if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. + var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = require('stream').Stream + } catch (ex) { + Stream = function () {} + } + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. 
+ me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = require('string_decoder').StringDecoder + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. + var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time + // as such, it cannot ever support astral-plane characters (10000-EFFFF) + // without a significant breaking change to either this parser, or the + // JavaScript language. Implementation of an emoji-capable xml parser + // is left as an exercise for the reader. + var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + + var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + function isWhitespace (c) { + return c === ' ' || c === '\n' || c === '\r' || c === '\t' + } + + function isQuote (c) { + return c === '"' || c === '\'' + } + + function isAttribEnd (c) { + return c === '>' || isWhitespace(c) + } + + function isMatch (regex, c) { + return regex.test(c) + } + + function notMatch (regex, c) { + return !isMatch(regex, c) + } + + var S = 0 + sax.STATE = { + BEGIN: S++, // leading byte order mark or whitespace + BEGIN_WHITESPACE: S++, // leading whitespace + TEXT: S++, // general stuff + TEXT_ENTITY: S++, // & and such. 
+ OPEN_WAKA: S++, // < + SGML_DECL: S++, // + SCRIPT: S++, // + +``` + +--- + +To use iMurmurHash in Node.js, install the module using NPM: + +```bash +npm install imurmurhash +``` + +Then simply include it in your scripts: + +```javascript +MurmurHash3 = require('imurmurhash'); +``` + +Quick Example +------------- + +```javascript +// Create the initial hash +var hashState = MurmurHash3('string'); + +// Incrementally add text +hashState.hash('more strings'); +hashState.hash('even more strings'); + +// All calls can be chained if desired +hashState.hash('and').hash('some').hash('more'); + +// Get a result +hashState.result(); +// returns 0xe4ccfe6b +``` + +Functions +--------- + +### MurmurHash3 ([string], [seed]) +Get a hash state object, optionally initialized with the given _string_ and _seed_. _Seed_ must be a positive integer if provided. Calling this function without the `new` keyword will return a cached state object that has been reset. This is safe to use as long as the object is only used from a single thread and no other hashes are created while operating on this one. If this constraint cannot be met, you can use `new` to create a new state object. For example: + +```javascript +// Use the cached object, calling the function again will return the same +// object (but reset, so the current state would be lost) +hashState = MurmurHash3(); +... + +// Create a new object that can be safely used however you wish. Calling the +// function again will simply return a new state object, and no state loss +// will occur, at the cost of creating more objects. +hashState = new MurmurHash3(); +``` + +Both methods can be mixed however you like if you have different use cases. + +--- + +### MurmurHash3.prototype.hash (string) +Incrementally add _string_ to the hash. This can be called as many times as you want for the hash state object, including after a call to `result()`. Returns `this` so calls can be chained. + +--- + +### MurmurHash3.prototype.result () +Get the result of the hash as a 32-bit positive integer. This performs the tail and finalizer portions of the algorithm, but does not store the result in the state object. This means that it is perfectly safe to get results and then continue adding strings via `hash`. + +```javascript +// Do the whole string at once +MurmurHash3('this is a test string').result(); +// 0x70529328 + +// Do part of the string, get a result, then the other part +var m = MurmurHash3('this is a'); +m.result(); +// 0xbfc4f834 +m.hash(' test string').result(); +// 0x70529328 (same as above) +``` + +--- + +### MurmurHash3.prototype.reset ([seed]) +Reset the state object for reuse, optionally using the given _seed_ (defaults to 0 like the constructor). Returns `this` so calls can be chained. + +--- + +License (MIT) +------------- +Copyright (c) 2013 Gary Court, Jens Taylor + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/imurmurhash/imurmurhash.js b/node_modules/imurmurhash/imurmurhash.js new file mode 100644 index 00000000..e63146a2 --- /dev/null +++ b/node_modules/imurmurhash/imurmurhash.js @@ -0,0 +1,138 @@ +/** + * @preserve + * JS Implementation of incremental MurmurHash3 (r150) (as of May 10, 2013) + * + * @author Jens Taylor + * @see http://github.com/homebrewing/brauhaus-diff + * @author Gary Court + * @see http://github.com/garycourt/murmurhash-js + * @author Austin Appleby + * @see http://sites.google.com/site/murmurhash/ + */ +(function(){ + var cache; + + // Call this function without `new` to use the cached object (good for + // single-threaded environments), or with `new` to create a new object. + // + // @param {string} key A UTF-16 or ASCII string + // @param {number} seed An optional positive integer + // @return {object} A MurmurHash3 object for incremental hashing + function MurmurHash3(key, seed) { + var m = this instanceof MurmurHash3 ? this : cache; + m.reset(seed) + if (typeof key === 'string' && key.length > 0) { + m.hash(key); + } + + if (m !== this) { + return m; + } + }; + + // Incrementally add a string to this hash + // + // @param {string} key A UTF-16 or ASCII string + // @return {object} this + MurmurHash3.prototype.hash = function(key) { + var h1, k1, i, top, len; + + len = key.length; + this.len += len; + + k1 = this.k1; + i = 0; + switch (this.rem) { + case 0: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) : 0; + case 1: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 8 : 0; + case 2: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 16 : 0; + case 3: + k1 ^= len > i ? (key.charCodeAt(i) & 0xff) << 24 : 0; + k1 ^= len > i ? 
(key.charCodeAt(i++) & 0xff00) >> 8 : 0; + } + + this.rem = (len + this.rem) & 3; // & 3 is same as % 4 + len -= this.rem; + if (len > 0) { + h1 = this.h1; + while (1) { + k1 = (k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000) & 0xffffffff; + k1 = (k1 << 15) | (k1 >>> 17); + k1 = (k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000) & 0xffffffff; + + h1 ^= k1; + h1 = (h1 << 13) | (h1 >>> 19); + h1 = (h1 * 5 + 0xe6546b64) & 0xffffffff; + + if (i >= len) { + break; + } + + k1 = ((key.charCodeAt(i++) & 0xffff)) ^ + ((key.charCodeAt(i++) & 0xffff) << 8) ^ + ((key.charCodeAt(i++) & 0xffff) << 16); + top = key.charCodeAt(i++); + k1 ^= ((top & 0xff) << 24) ^ + ((top & 0xff00) >> 8); + } + + k1 = 0; + switch (this.rem) { + case 3: k1 ^= (key.charCodeAt(i + 2) & 0xffff) << 16; + case 2: k1 ^= (key.charCodeAt(i + 1) & 0xffff) << 8; + case 1: k1 ^= (key.charCodeAt(i) & 0xffff); + } + + this.h1 = h1; + } + + this.k1 = k1; + return this; + }; + + // Get the result of this hash + // + // @return {number} The 32-bit hash + MurmurHash3.prototype.result = function() { + var k1, h1; + + k1 = this.k1; + h1 = this.h1; + + if (k1 > 0) { + k1 = (k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000) & 0xffffffff; + k1 = (k1 << 15) | (k1 >>> 17); + k1 = (k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000) & 0xffffffff; + h1 ^= k1; + } + + h1 ^= this.len; + + h1 ^= h1 >>> 16; + h1 = (h1 * 0xca6b + (h1 & 0xffff) * 0x85eb0000) & 0xffffffff; + h1 ^= h1 >>> 13; + h1 = (h1 * 0xae35 + (h1 & 0xffff) * 0xc2b20000) & 0xffffffff; + h1 ^= h1 >>> 16; + + return h1 >>> 0; + }; + + // Reset the hash object for reuse + // + // @param {number} seed An optional positive integer + MurmurHash3.prototype.reset = function(seed) { + this.h1 = typeof seed === 'number' ? seed : 0; + this.rem = this.k1 = this.len = 0; + return this; + }; + + // A cached object to use. This can be safely used if you're in a single- + // threaded environment, otherwise you need to create new hashes to use. 
+ cache = new MurmurHash3(); + + if (typeof(module) != 'undefined') { + module.exports = MurmurHash3; + } else { + this.MurmurHash3 = MurmurHash3; + } +}()); diff --git a/node_modules/imurmurhash/imurmurhash.min.js b/node_modules/imurmurhash/imurmurhash.min.js new file mode 100644 index 00000000..dc0ee88d --- /dev/null +++ b/node_modules/imurmurhash/imurmurhash.min.js @@ -0,0 +1,12 @@ +/** + * @preserve + * JS Implementation of incremental MurmurHash3 (r150) (as of May 10, 2013) + * + * @author Jens Taylor + * @see http://github.com/homebrewing/brauhaus-diff + * @author Gary Court + * @see http://github.com/garycourt/murmurhash-js + * @author Austin Appleby + * @see http://sites.google.com/site/murmurhash/ + */ +!function(){function t(h,r){var s=this instanceof t?this:e;return s.reset(r),"string"==typeof h&&h.length>0&&s.hash(h),s!==this?s:void 0}var e;t.prototype.hash=function(t){var e,h,r,s,i;switch(i=t.length,this.len+=i,h=this.k1,r=0,this.rem){case 0:h^=i>r?65535&t.charCodeAt(r++):0;case 1:h^=i>r?(65535&t.charCodeAt(r++))<<8:0;case 2:h^=i>r?(65535&t.charCodeAt(r++))<<16:0;case 3:h^=i>r?(255&t.charCodeAt(r))<<24:0,h^=i>r?(65280&t.charCodeAt(r++))>>8:0}if(this.rem=3&i+this.rem,i-=this.rem,i>0){for(e=this.h1;;){if(h=4294967295&11601*h+3432906752*(65535&h),h=h<<15|h>>>17,h=4294967295&13715*h+461832192*(65535&h),e^=h,e=e<<13|e>>>19,e=4294967295&5*e+3864292196,r>=i)break;h=65535&t.charCodeAt(r++)^(65535&t.charCodeAt(r++))<<8^(65535&t.charCodeAt(r++))<<16,s=t.charCodeAt(r++),h^=(255&s)<<24^(65280&s)>>8}switch(h=0,this.rem){case 3:h^=(65535&t.charCodeAt(r+2))<<16;case 2:h^=(65535&t.charCodeAt(r+1))<<8;case 1:h^=65535&t.charCodeAt(r)}this.h1=e}return this.k1=h,this},t.prototype.result=function(){var t,e;return t=this.k1,e=this.h1,t>0&&(t=4294967295&11601*t+3432906752*(65535&t),t=t<<15|t>>>17,t=4294967295&13715*t+461832192*(65535&t),e^=t),e^=this.len,e^=e>>>16,e=4294967295&51819*e+2246770688*(65535&e),e^=e>>>13,e=4294967295&44597*e+3266445312*(65535&e),e^=e>>>16,e>>>0},t.prototype.reset=function(t){return this.h1="number"==typeof t?t:0,this.rem=this.k1=this.len=0,this},e=new t,"undefined"!=typeof module?module.exports=t:this.MurmurHash3=t}(); \ No newline at end of file diff --git a/node_modules/imurmurhash/package.json b/node_modules/imurmurhash/package.json new file mode 100644 index 00000000..8a93edb5 --- /dev/null +++ b/node_modules/imurmurhash/package.json @@ -0,0 +1,40 @@ +{ + "name": "imurmurhash", + "version": "0.1.4", + "description": "An incremental implementation of MurmurHash3", + "homepage": "https://github.com/jensyt/imurmurhash-js", + "main": "imurmurhash.js", + "files": [ + "imurmurhash.js", + "imurmurhash.min.js", + "package.json", + "README.md" + ], + "repository": { + "type": "git", + "url": "https://github.com/jensyt/imurmurhash-js" + }, + "bugs": { + "url": "https://github.com/jensyt/imurmurhash-js/issues" + }, + "keywords": [ + "murmur", + "murmurhash", + "murmurhash3", + "hash", + "incremental" + ], + "author": { + "name": "Jens Taylor", + "email": "jensyt@gmail.com", + "url": "https://github.com/homebrewing" + }, + "license": "MIT", + "dependencies": { + }, + "devDependencies": { + }, + "engines": { + "node": ">=0.8.19" + } +} diff --git a/node_modules/inflight/LICENSE b/node_modules/inflight/LICENSE new file mode 100644 index 00000000..05eeeb88 --- /dev/null +++ b/node_modules/inflight/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/inflight/README.md b/node_modules/inflight/README.md new file mode 100644 index 00000000..6dc89291 --- /dev/null +++ b/node_modules/inflight/README.md @@ -0,0 +1,37 @@ +# inflight + +Add callbacks to requests in flight to avoid async duplication + +## USAGE + +```javascript +var inflight = require('inflight') + +// some request that does some stuff +function req(key, callback) { + // key is any random string. like a url or filename or whatever. + // + // will return either a falsey value, indicating that the + // request for this key is already in flight, or a new callback + // which when called will call all callbacks passed to inflightk + // with the same key + callback = inflight(key, callback) + + // If we got a falsey value back, then there's already a req going + if (!callback) return + + // this is where you'd fetch the url or whatever + // callback is also once()-ified, so it can safely be assigned + // to multiple events etc. First call wins. + setTimeout(function() { + callback(null, key) + }, 100) +} + +// only assigns a single setTimeout +// when it dings, all cbs get called +req('foo', cb1) +req('foo', cb2) +req('foo', cb3) +req('foo', cb4) +``` diff --git a/node_modules/inflight/inflight.js b/node_modules/inflight/inflight.js new file mode 100644 index 00000000..48202b3c --- /dev/null +++ b/node_modules/inflight/inflight.js @@ -0,0 +1,54 @@ +var wrappy = require('wrappy') +var reqs = Object.create(null) +var once = require('once') + +module.exports = wrappy(inflight) + +function inflight (key, cb) { + if (reqs[key]) { + reqs[key].push(cb) + return null + } else { + reqs[key] = [cb] + return makeres(key) + } +} + +function makeres (key) { + return once(function RES () { + var cbs = reqs[key] + var len = cbs.length + var args = slice(arguments) + + // XXX It's somewhat ambiguous whether a new callback added in this + // pass should be queued for later execution if something in the + // list of callbacks throws, or if it should just be discarded. + // However, it's such an edge case that it hardly matters, and either + // choice is likely as surprising as the other. + // As it happens, we do go ahead and schedule it for later execution. + try { + for (var i = 0; i < len; i++) { + cbs[i].apply(null, args) + } + } finally { + if (cbs.length > len) { + // added more in the interim. + // de-zalgo, just in case, but don't call again. 
+ cbs.splice(0, len) + process.nextTick(function () { + RES.apply(null, args) + }) + } else { + delete reqs[key] + } + } + }) +} + +function slice (args) { + var length = args.length + var array = [] + + for (var i = 0; i < length; i++) array[i] = args[i] + return array +} diff --git a/node_modules/inflight/package.json b/node_modules/inflight/package.json new file mode 100644 index 00000000..6084d350 --- /dev/null +++ b/node_modules/inflight/package.json @@ -0,0 +1,29 @@ +{ + "name": "inflight", + "version": "1.0.6", + "description": "Add callbacks to requests in flight to avoid async duplication", + "main": "inflight.js", + "files": [ + "inflight.js" + ], + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.1.2" + }, + "scripts": { + "test": "tap test.js --100" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/inflight.git" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "bugs": { + "url": "https://github.com/isaacs/inflight/issues" + }, + "homepage": "https://github.com/isaacs/inflight", + "license": "ISC" +} diff --git a/node_modules/inherits/LICENSE b/node_modules/inherits/LICENSE new file mode 100644 index 00000000..dea3013d --- /dev/null +++ b/node_modules/inherits/LICENSE @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/inherits/README.md b/node_modules/inherits/README.md new file mode 100644 index 00000000..b1c56658 --- /dev/null +++ b/node_modules/inherits/README.md @@ -0,0 +1,42 @@ +Browser-friendly inheritance fully compatible with standard node.js +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). + +This package exports standard `inherits` from node.js `util` module in +node environment, but also provides alternative browser-friendly +implementation through [browser +field](https://gist.github.com/shtylman/4339901). Alternative +implementation is a literal copy of standard one located in standalone +module to avoid requiring of `util`. It also has a shim for old +browsers with no `Object.create` support. + +While keeping you sure you are using standard `inherits` +implementation in node.js environment, it allows bundlers such as +[browserify](https://github.com/substack/node-browserify) to not +include full `util` package to your client code if all you need is +just `inherits` function. It worth, because browser shim for `util` +package is large and `inherits` is often the single function you need +from it. + +It's recommended to use this package instead of +`require('util').inherits` for any code that has chances to be used +not only in node.js but in browser too. 
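Since the usage snippet in the next section stops at `require`, here is a minimal illustrative sketch of wiring two constructors together with this module. The `Animal`/`Dog` names are hypothetical examples, not part of the package; note that, as described in the version note further down, `inherits` overwrites the subclass prototype, so it should be called before any methods are attached to it.

```js
// Illustrative only: classic constructor inheritance with inherits()
var inherits = require('inherits');

function Animal(name) {
  this.name = name;
}
Animal.prototype.speak = function () {
  return this.name + ' makes a sound';
};

function Dog(name) {
  Animal.call(this, name); // run the parent constructor
}

// Sets Dog.super_ = Animal and replaces Dog.prototype with one that
// descends from Animal.prototype -- call this before adding methods.
inherits(Dog, Animal);

Dog.prototype.speak = function () {
  return Dog.super_.prototype.speak.call(this) + ' (woof)';
};

console.log(new Dog('Rex').speak()); // "Rex makes a sound (woof)"
```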
+ +## usage + +```js +var inherits = require('inherits'); +// then use exactly as the standard one +``` + +## note on version ~1.0 + +Version ~1.0 had completely different motivation and is not compatible +neither with 2.0 nor with standard node.js `inherits`. + +If you are using version ~1.0 and planning to switch to ~2.0, be +careful: + +* new version uses `super_` instead of `super` for referencing + superclass +* new version overwrites current prototype while old one preserves any + existing fields on it diff --git a/node_modules/inherits/inherits.js b/node_modules/inherits/inherits.js new file mode 100644 index 00000000..f71f2d93 --- /dev/null +++ b/node_modules/inherits/inherits.js @@ -0,0 +1,9 @@ +try { + var util = require('util'); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = require('./inherits_browser.js'); +} diff --git a/node_modules/inherits/inherits_browser.js b/node_modules/inherits/inherits_browser.js new file mode 100644 index 00000000..86bbb3dc --- /dev/null +++ b/node_modules/inherits/inherits_browser.js @@ -0,0 +1,27 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} diff --git a/node_modules/inherits/package.json b/node_modules/inherits/package.json new file mode 100644 index 00000000..37b4366b --- /dev/null +++ b/node_modules/inherits/package.json @@ -0,0 +1,29 @@ +{ + "name": "inherits", + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "version": "2.0.4", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "main": "./inherits.js", + "browser": "./inherits_browser.js", + "repository": "git://github.com/isaacs/inherits", + "license": "ISC", + "scripts": { + "test": "tap" + }, + "devDependencies": { + "tap": "^14.2.4" + }, + "files": [ + "inherits.js", + "inherits_browser.js" + ] +} diff --git a/node_modules/ini/LICENSE b/node_modules/ini/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/ini/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/ini/README.md b/node_modules/ini/README.md new file mode 100644 index 00000000..33df2582 --- /dev/null +++ b/node_modules/ini/README.md @@ -0,0 +1,102 @@ +An ini format parser and serializer for node. + +Sections are treated as nested objects. Items before the first +heading are saved on the object directly. + +## Usage + +Consider an ini-file `config.ini` that looks like this: + + ; this comment is being ignored + scope = global + + [database] + user = dbuser + password = dbpassword + database = use_this_database + + [paths.default] + datadir = /var/lib/data + array[] = first value + array[] = second value + array[] = third value + +You can read, manipulate and write the ini-file like so: + + var fs = require('fs') + , ini = require('ini') + + var config = ini.parse(fs.readFileSync('./config.ini', 'utf-8')) + + config.scope = 'local' + config.database.database = 'use_another_database' + config.paths.default.tmpdir = '/tmp' + delete config.paths.default.datadir + config.paths.default.array.push('fourth value') + + fs.writeFileSync('./config_modified.ini', ini.stringify(config, { section: 'section' })) + +This will result in a file called `config_modified.ini` being written +to the filesystem with the following content: + + [section] + scope=local + [section.database] + user=dbuser + password=dbpassword + database=use_another_database + [section.paths.default] + tmpdir=/tmp + array[]=first value + array[]=second value + array[]=third value + array[]=fourth value + + +## API + +### decode(inistring) + +Decode the ini-style formatted `inistring` into a nested object. + +### parse(inistring) + +Alias for `decode(inistring)` + +### encode(object, [options]) + +Encode the object `object` into an ini-style formatted string. If the +optional parameter `section` is given, then all top-level properties +of the object are put into this section and the `section`-string is +prepended to all sub-sections, see the usage example above. + +The `options` object may contain the following: + +* `section` A string which will be the first `section` in the encoded + ini data. Defaults to none. +* `whitespace` Boolean to specify whether to put whitespace around the + `=` character. By default, whitespace is omitted, to be friendly to + some persnickety old parsers that don't tolerate it well. But some + find that it's more human-readable and pretty with the whitespace. + +For backwards compatibility reasons, if a `string` options is passed +in, then it is assumed to be the `section` value. + +### stringify(object, [options]) + +Alias for `encode(object, [options])` + +### safe(val) + +Escapes the string `val` such that it is safe to be used as a key or +value in an ini-file. Basically escapes quotes. 
For example + + ini.safe('"unsafe string"') + +would result in + + "\"unsafe string\"" + +### unsafe(val) + +Unescapes the string `val` diff --git a/node_modules/ini/ini.js b/node_modules/ini/ini.js new file mode 100644 index 00000000..590195dd --- /dev/null +++ b/node_modules/ini/ini.js @@ -0,0 +1,194 @@ +exports.parse = exports.decode = decode + +exports.stringify = exports.encode = encode + +exports.safe = safe +exports.unsafe = unsafe + +var eol = typeof process !== 'undefined' && + process.platform === 'win32' ? '\r\n' : '\n' + +function encode (obj, opt) { + var children = [] + var out = '' + + if (typeof opt === 'string') { + opt = { + section: opt, + whitespace: false + } + } else { + opt = opt || {} + opt.whitespace = opt.whitespace === true + } + + var separator = opt.whitespace ? ' = ' : '=' + + Object.keys(obj).forEach(function (k, _, __) { + var val = obj[k] + if (val && Array.isArray(val)) { + val.forEach(function (item) { + out += safe(k + '[]') + separator + safe(item) + '\n' + }) + } else if (val && typeof val === 'object') { + children.push(k) + } else { + out += safe(k) + separator + safe(val) + eol + } + }) + + if (opt.section && out.length) { + out = '[' + safe(opt.section) + ']' + eol + out + } + + children.forEach(function (k, _, __) { + var nk = dotSplit(k).join('\\.') + var section = (opt.section ? opt.section + '.' : '') + nk + var child = encode(obj[k], { + section: section, + whitespace: opt.whitespace + }) + if (out.length && child.length) { + out += eol + } + out += child + }) + + return out +} + +function dotSplit (str) { + return str.replace(/\1/g, '\u0002LITERAL\\1LITERAL\u0002') + .replace(/\\\./g, '\u0001') + .split(/\./).map(function (part) { + return part.replace(/\1/g, '\\.') + .replace(/\2LITERAL\\1LITERAL\2/g, '\u0001') + }) +} + +function decode (str) { + var out = {} + var p = out + var section = null + // section |key = value + var re = /^\[([^\]]*)\]$|^([^=]+)(=(.*))?$/i + var lines = str.split(/[\r\n]+/g) + + lines.forEach(function (line, _, __) { + if (!line || line.match(/^\s*[;#]/)) return + var match = line.match(re) + if (!match) return + if (match[1] !== undefined) { + section = unsafe(match[1]) + p = out[section] = out[section] || {} + return + } + var key = unsafe(match[2]) + var value = match[3] ? unsafe(match[4]) : true + switch (value) { + case 'true': + case 'false': + case 'null': value = JSON.parse(value) + } + + // Convert keys with '[]' suffix to an array + if (key.length > 2 && key.slice(-2) === '[]') { + key = key.substring(0, key.length - 2) + if (!p[key]) { + p[key] = [] + } else if (!Array.isArray(p[key])) { + p[key] = [p[key]] + } + } + + // safeguard against resetting a previously defined + // array by accidentally forgetting the brackets + if (Array.isArray(p[key])) { + p[key].push(value) + } else { + p[key] = value + } + }) + + // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}} + // use a filter to return the keys that have to be deleted. + Object.keys(out).filter(function (k, _, __) { + if (!out[k] || + typeof out[k] !== 'object' || + Array.isArray(out[k])) { + return false + } + // see if the parent section is also an object. 
+ // if so, add it to that, and mark this one for deletion + var parts = dotSplit(k) + var p = out + var l = parts.pop() + var nl = l.replace(/\\\./g, '.') + parts.forEach(function (part, _, __) { + if (!p[part] || typeof p[part] !== 'object') p[part] = {} + p = p[part] + }) + if (p === out && nl === l) { + return false + } + p[nl] = out[k] + return true + }).forEach(function (del, _, __) { + delete out[del] + }) + + return out +} + +function isQuoted (val) { + return (val.charAt(0) === '"' && val.slice(-1) === '"') || + (val.charAt(0) === "'" && val.slice(-1) === "'") +} + +function safe (val) { + return (typeof val !== 'string' || + val.match(/[=\r\n]/) || + val.match(/^\[/) || + (val.length > 1 && + isQuoted(val)) || + val !== val.trim()) + ? JSON.stringify(val) + : val.replace(/;/g, '\\;').replace(/#/g, '\\#') +} + +function unsafe (val, doUnesc) { + val = (val || '').trim() + if (isQuoted(val)) { + // remove the single quotes before calling JSON.parse + if (val.charAt(0) === "'") { + val = val.substr(1, val.length - 2) + } + try { val = JSON.parse(val) } catch (_) {} + } else { + // walk the val to find the first not-escaped ; character + var esc = false + var unesc = '' + for (var i = 0, l = val.length; i < l; i++) { + var c = val.charAt(i) + if (esc) { + if ('\\;#'.indexOf(c) !== -1) { + unesc += c + } else { + unesc += '\\' + c + } + esc = false + } else if (';#'.indexOf(c) !== -1) { + break + } else if (c === '\\') { + esc = true + } else { + unesc += c + } + } + if (esc) { + unesc += '\\' + } + return unesc.trim() + } + return val +} diff --git a/node_modules/ini/package.json b/node_modules/ini/package.json new file mode 100644 index 00000000..269bc158 --- /dev/null +++ b/node_modules/ini/package.json @@ -0,0 +1,30 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "ini", + "description": "An ini encoder/decoder for node", + "version": "1.3.5", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/ini.git" + }, + "main": "ini.js", + "scripts": { + "pretest": "standard ini.js", + "test": "tap test/*.js --100 -J", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "engines": { + "node": "*" + }, + "dependencies": {}, + "devDependencies": { + "standard": "^10.0.3", + "tap": "^10.7.3 || 11" + }, + "license": "ISC", + "files": [ + "ini.js" + ] +} diff --git a/node_modules/ipaddr.js/LICENSE b/node_modules/ipaddr.js/LICENSE new file mode 100644 index 00000000..f6b37b52 --- /dev/null +++ b/node_modules/ipaddr.js/LICENSE @@ -0,0 +1,19 @@ +Copyright (C) 2011-2017 whitequark + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/ipaddr.js/README.md b/node_modules/ipaddr.js/README.md new file mode 100644 index 00000000..6876a3b8 --- /dev/null +++ b/node_modules/ipaddr.js/README.md @@ -0,0 +1,233 @@ +# ipaddr.js — an IPv6 and IPv4 address manipulation library [![Build Status](https://travis-ci.org/whitequark/ipaddr.js.svg)](https://travis-ci.org/whitequark/ipaddr.js) + +ipaddr.js is a small (1.9K minified and gzipped) library for manipulating +IP addresses in JavaScript environments. It runs on both CommonJS runtimes +(e.g. [nodejs]) and in a web browser. + +ipaddr.js allows you to verify and parse string representation of an IP +address, match it against a CIDR range or range list, determine if it falls +into some reserved ranges (examples include loopback and private ranges), +and convert between IPv4 and IPv4-mapped IPv6 addresses. + +[nodejs]: http://nodejs.org + +## Installation + +`npm install ipaddr.js` + +or + +`bower install ipaddr.js` + +## API + +ipaddr.js defines one object in the global scope: `ipaddr`. In CommonJS, +it is exported from the module: + +```js +var ipaddr = require('ipaddr.js'); +``` + +The API consists of several global methods and two classes: ipaddr.IPv6 and ipaddr.IPv4. + +### Global methods + +There are three global methods defined: `ipaddr.isValid`, `ipaddr.parse` and +`ipaddr.process`. All of them receive a string as a single parameter. + +The `ipaddr.isValid` method returns `true` if the address is a valid IPv4 or +IPv6 address, and `false` otherwise. It does not throw any exceptions. + +The `ipaddr.parse` method returns an object representing the IP address, +or throws an `Error` if the passed string is not a valid representation of an +IP address. + +The `ipaddr.process` method works just like the `ipaddr.parse` one, but it +automatically converts IPv4-mapped IPv6 addresses to their IPv4 counterparts +before returning. It is useful when you have a Node.js instance listening +on an IPv6 socket, and the `net.ivp6.bindv6only` sysctl parameter (or its +equivalent on non-Linux OS) is set to 0. In this case, you can accept IPv4 +connections on your IPv6-only socket, but the remote address will be mangled. +Use `ipaddr.process` method to automatically demangle it. + +### Object representation + +Parsing methods return an object which descends from `ipaddr.IPv6` or +`ipaddr.IPv4`. These objects share some properties, but most of them differ. + +#### Shared properties + +One can determine the type of address by calling `addr.kind()`. It will return +either `"ipv6"` or `"ipv4"`. + +An address can be converted back to its string representation with `addr.toString()`. +Note that this method: + * does not return the original string used to create the object (in fact, there is + no way of getting that string) + * returns a compact representation (when it is applicable) + +A `match(range, bits)` method can be used to check if the address falls into a +certain CIDR range. +Note that an address can be (obviously) matched only against an address of the same type. + +For example: + +```js +var addr = ipaddr.parse("2001:db8:1234::1"); +var range = ipaddr.parse("2001:db8::"); + +addr.match(range, 32); // => true +``` + +Alternatively, `match` can also be called as `match([range, bits])`. 
In this way, +it can be used together with the `parseCIDR(string)` method, which parses an IP +address together with a CIDR range. + +For example: + +```js +var addr = ipaddr.parse("2001:db8:1234::1"); + +addr.match(ipaddr.parseCIDR("2001:db8::/32")); // => true +``` + +A `range()` method returns one of predefined names for several special ranges defined +by IP protocols. The exact names (and their respective CIDR ranges) can be looked up +in the source: [IPv6 ranges] and [IPv4 ranges]. Some common ones include `"unicast"` +(the default one) and `"reserved"`. + +You can match against your own range list by using +`ipaddr.subnetMatch(address, rangeList, defaultName)` method. It can work with a mix of IPv6 or IPv4 addresses, and accepts a name-to-subnet map as the range list. For example: + +```js +var rangeList = { + documentationOnly: [ ipaddr.parse('2001:db8::'), 32 ], + tunnelProviders: [ + [ ipaddr.parse('2001:470::'), 32 ], // he.net + [ ipaddr.parse('2001:5c0::'), 32 ] // freenet6 + ] +}; +ipaddr.subnetMatch(ipaddr.parse('2001:470:8:66::1'), rangeList, 'unknown'); // => "tunnelProviders" +``` + +The addresses can be converted to their byte representation with `toByteArray()`. +(Actually, JavaScript mostly does not know about byte buffers. They are emulated with +arrays of numbers, each in range of 0..255.) + +```js +var bytes = ipaddr.parse('2a00:1450:8007::68').toByteArray(); // ipv6.google.com +bytes // => [42, 0x00, 0x14, 0x50, 0x80, 0x07, 0x00, , 0x00, 0x68 ] +``` + +The `ipaddr.IPv4` and `ipaddr.IPv6` objects have some methods defined, too. All of them +have the same interface for both protocols, and are similar to global methods. + +`ipaddr.IPvX.isValid(string)` can be used to check if the string is a valid address +for particular protocol, and `ipaddr.IPvX.parse(string)` is the error-throwing parser. + +`ipaddr.IPvX.isValid(string)` uses the same format for parsing as the POSIX `inet_ntoa` function, which accepts unusual formats like `0xc0.168.1.1` or `0x10000000`. The function `ipaddr.IPv4.isValidFourPartDecimal(string)` validates the IPv4 address and also ensures that it is written in four-part decimal format. + +[IPv6 ranges]: https://github.com/whitequark/ipaddr.js/blob/master/src/ipaddr.coffee#L186 +[IPv4 ranges]: https://github.com/whitequark/ipaddr.js/blob/master/src/ipaddr.coffee#L71 + +#### IPv6 properties + +Sometimes you will want to convert IPv6 not to a compact string representation (with +the `::` substitution); the `toNormalizedString()` method will return an address where +all zeroes are explicit. + +For example: + +```js +var addr = ipaddr.parse("2001:0db8::0001"); +addr.toString(); // => "2001:db8::1" +addr.toNormalizedString(); // => "2001:db8:0:0:0:0:0:1" +``` + +The `isIPv4MappedAddress()` method will return `true` if this address is an IPv4-mapped +one, and `toIPv4Address()` will return an IPv4 object address. + +To access the underlying binary representation of the address, use `addr.parts`. + +```js +var addr = ipaddr.parse("2001:db8:10::1234:DEAD"); +addr.parts // => [0x2001, 0xdb8, 0x10, 0, 0, 0, 0x1234, 0xdead] +``` + +A IPv6 zone index can be accessed via `addr.zoneId`: + +```js +var addr = ipaddr.parse("2001:db8::%eth0"); +addr.zoneId // => 'eth0' +``` + +#### IPv4 properties + +`toIPv4MappedAddress()` will return a corresponding IPv4-mapped IPv6 address. + +To access the underlying representation of the address, use `addr.octets`. 
+ +```js +var addr = ipaddr.parse("192.168.1.1"); +addr.octets // => [192, 168, 1, 1] +``` + +`prefixLengthFromSubnetMask()` will return a CIDR prefix length for a valid IPv4 netmask or +false if the netmask is not valid. + +```js +ipaddr.IPv4.parse('255.255.255.240').prefixLengthFromSubnetMask() == 28 +ipaddr.IPv4.parse('255.192.164.0').prefixLengthFromSubnetMask() == null +``` + +`subnetMaskFromPrefixLength()` will return an IPv4 netmask for a valid CIDR prefix length. + +```js +ipaddr.IPv4.subnetMaskFromPrefixLength(24) == "255.255.255.0" +ipaddr.IPv4.subnetMaskFromPrefixLength(29) == "255.255.255.248" +``` + +`broadcastAddressFromCIDR()` will return the broadcast address for a given IPv4 interface and netmask in CIDR notation. +```js +ipaddr.IPv4.broadcastAddressFromCIDR("172.0.0.1/24") == "172.0.0.255" +``` +`networkAddressFromCIDR()` will return the network address for a given IPv4 interface and netmask in CIDR notation. +```js +ipaddr.IPv4.networkAddressFromCIDR("172.0.0.1/24") == "172.0.0.0" +``` + +#### Conversion + +IPv4 and IPv6 can be converted bidirectionally to and from network byte order (MSB) byte arrays. + +The `fromByteArray()` method will take an array and create an appropriate IPv4 or IPv6 object +if the input satisfies the requirements. For IPv4 it has to be an array of four 8-bit values, +while for IPv6 it has to be an array of sixteen 8-bit values. + +For example: +```js +var addr = ipaddr.fromByteArray([0x7f, 0, 0, 1]); +addr.toString(); // => "127.0.0.1" +``` + +or + +```js +var addr = ipaddr.fromByteArray([0x20, 1, 0xd, 0xb8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]) +addr.toString(); // => "2001:db8::1" +``` + +Both objects also offer a `toByteArray()` method, which returns an array in network byte order (MSB). + +For example: +```js +var addr = ipaddr.parse("127.0.0.1"); +addr.toByteArray(); // => [0x7f, 0, 0, 1] +``` + +or + +```js +var addr = ipaddr.parse("2001:db8::1"); +addr.toByteArray(); // => [0x20, 1, 0xd, 0xb8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1] +``` diff --git a/node_modules/ipaddr.js/ipaddr.min.js b/node_modules/ipaddr.js/ipaddr.min.js new file mode 100644 index 00000000..b54a7cc4 --- /dev/null +++ b/node_modules/ipaddr.js/ipaddr.min.js @@ -0,0 +1 @@ +(function(){var r,t,n,e,i,o,a,s;t={},s=this,"undefined"!=typeof module&&null!==module&&module.exports?module.exports=t:s.ipaddr=t,a=function(r,t,n,e){var i,o;if(r.length!==t.length)throw new Error("ipaddr: cannot match CIDR for objects with different lengths");for(i=0;e>0;){if((o=n-e)<0&&(o=0),r[i]>>o!=t[i]>>o)return!1;e-=n,i+=1}return!0},t.subnetMatch=function(r,t,n){var e,i,o,a,s;null==n&&(n="unicast");for(o in t)for(!(a=t[o])[0]||a[0]instanceof Array||(a=[a]),e=0,i=a.length;e=0;t=n+=-1){if(!((e=this.octets[t])in a))return null;if(o=a[e],i&&0!==o)return null;8!==o&&(i=!0),r+=o}return 32-r},r}(),n="(0?\\d+|0x[a-f0-9]+)",e={fourOctet:new RegExp("^"+n+"\\."+n+"\\."+n+"\\."+n+"$","i"),longValue:new RegExp("^"+n+"$","i")},t.IPv4.parser=function(r){var t,n,i,o,a;if(n=function(r){return"0"===r[0]&&"x"!==r[1]?parseInt(r,8):parseInt(r)},t=r.match(e.fourOctet))return function(){var r,e,o,a;for(a=[],r=0,e=(o=t.slice(1,6)).length;r4294967295||a<0)throw new Error("ipaddr: address outside defined range");return function(){var r,t;for(t=[],o=r=0;r<=24;o=r+=8)t.push(a>>o&255);return t}().reverse()}return null},t.IPv6=function(){function r(r,t){var n,e,i,o,a,s;if(16===r.length)for(this.parts=[],n=e=0;e<=14;n=e+=2)this.parts.push(r[n]<<8|r[n+1]);else{if(8!==r.length)throw new Error("ipaddr: ipv6 part count should be 8 or 
16");this.parts=r}for(i=0,o=(s=this.parts).length;it&&(r=n.index,t=n[0].length);return t<0?i:i.substring(0,r)+"::"+i.substring(r+t)},r.prototype.toByteArray=function(){var r,t,n,e,i;for(r=[],t=0,n=(i=this.parts).length;t>8),r.push(255&e);return r},r.prototype.toNormalizedString=function(){var r,t,n;return r=function(){var r,n,e,i;for(i=[],r=0,n=(e=this.parts).length;r>8,255&r,n>>8,255&n])},r.prototype.prefixLengthFromSubnetMask=function(){var r,t,n,e,i,o,a;for(a={0:16,32768:15,49152:14,57344:13,61440:12,63488:11,64512:10,65024:9,65280:8,65408:7,65472:6,65504:5,65520:4,65528:3,65532:2,65534:1,65535:0},r=0,i=!1,t=n=7;n>=0;t=n+=-1){if(!((e=this.parts[t])in a))return null;if(o=a[e],i&&0!==o)return null;16!==o&&(i=!0),r+=o}return 128-r},r}(),i="(?:[0-9a-f]+::?)+",o={zoneIndex:new RegExp("%[0-9a-z]{1,}","i"),native:new RegExp("^(::)?("+i+")?([0-9a-f]+)?(::)?(%[0-9a-z]{1,})?$","i"),transitional:new RegExp("^((?:"+i+")|(?:::)(?:"+i+")?)"+n+"\\."+n+"\\."+n+"\\."+n+"(%[0-9a-z]{1,})?$","i")},r=function(r,t){var n,e,i,a,s,p;if(r.indexOf("::")!==r.lastIndexOf("::"))return null;for((p=(r.match(o.zoneIndex)||[])[0])&&(p=p.substring(1),r=r.replace(/%.+$/,"")),n=0,e=-1;(e=r.indexOf(":",e+1))>=0;)n++;if("::"===r.substr(0,2)&&n--,"::"===r.substr(-2,2)&&n--,n>t)return null;for(s=t-n,a=":";s--;)a+="0:";return":"===(r=r.replace("::",a))[0]&&(r=r.slice(1)),":"===r[r.length-1]&&(r=r.slice(0,-1)),t=function(){var t,n,e,o;for(o=[],t=0,n=(e=r.split(":")).length;t=0&&t<=32)return e=[this.parse(n[1]),t],Object.defineProperty(e,"toString",{value:function(){return this.join("/")}}),e;throw new Error("ipaddr: string is not formatted like an IPv4 CIDR range")},t.IPv4.subnetMaskFromPrefixLength=function(r){var t,n,e;if((r=parseInt(r))<0||r>32)throw new Error("ipaddr: invalid IPv4 prefix length");for(e=[0,0,0,0],n=0,t=Math.floor(r/8);n=0&&t<=128)return e=[this.parse(n[1]),t],Object.defineProperty(e,"toString",{value:function(){return this.join("/")}}),e;throw new Error("ipaddr: string is not formatted like an IPv6 CIDR range")},t.isValid=function(r){return t.IPv6.isValid(r)||t.IPv4.isValid(r)},t.parse=function(r){if(t.IPv6.isValid(r))return t.IPv6.parse(r);if(t.IPv4.isValid(r))return t.IPv4.parse(r);throw new Error("ipaddr: the address has neither IPv6 nor IPv4 format")},t.parseCIDR=function(r){try{return t.IPv6.parseCIDR(r)}catch(n){n;try{return t.IPv4.parseCIDR(r)}catch(r){throw r,new Error("ipaddr: the address has neither IPv6 nor IPv4 CIDR format")}}},t.fromByteArray=function(r){var n;if(4===(n=r.length))return new t.IPv4(r);if(16===n)return new t.IPv6(r);throw new Error("ipaddr: the binary input is neither an IPv6 nor IPv4 address")},t.process=function(r){var t;return t=this.parse(r),"ipv6"===t.kind()&&t.isIPv4MappedAddress()?t.toIPv4Address():t}}).call(this); \ No newline at end of file diff --git a/node_modules/ipaddr.js/lib/ipaddr.js b/node_modules/ipaddr.js/lib/ipaddr.js new file mode 100644 index 00000000..18bd93b5 --- /dev/null +++ b/node_modules/ipaddr.js/lib/ipaddr.js @@ -0,0 +1,673 @@ +(function() { + var expandIPv6, ipaddr, ipv4Part, ipv4Regexes, ipv6Part, ipv6Regexes, matchCIDR, root, zoneIndex; + + ipaddr = {}; + + root = this; + + if ((typeof module !== "undefined" && module !== null) && module.exports) { + module.exports = ipaddr; + } else { + root['ipaddr'] = ipaddr; + } + + matchCIDR = function(first, second, partSize, cidrBits) { + var part, shift; + if (first.length !== second.length) { + throw new Error("ipaddr: cannot match CIDR for objects with different lengths"); + } + part = 0; + while (cidrBits 
> 0) { + shift = partSize - cidrBits; + if (shift < 0) { + shift = 0; + } + if (first[part] >> shift !== second[part] >> shift) { + return false; + } + cidrBits -= partSize; + part += 1; + } + return true; + }; + + ipaddr.subnetMatch = function(address, rangeList, defaultName) { + var k, len, rangeName, rangeSubnets, subnet; + if (defaultName == null) { + defaultName = 'unicast'; + } + for (rangeName in rangeList) { + rangeSubnets = rangeList[rangeName]; + if (rangeSubnets[0] && !(rangeSubnets[0] instanceof Array)) { + rangeSubnets = [rangeSubnets]; + } + for (k = 0, len = rangeSubnets.length; k < len; k++) { + subnet = rangeSubnets[k]; + if (address.kind() === subnet[0].kind()) { + if (address.match.apply(address, subnet)) { + return rangeName; + } + } + } + } + return defaultName; + }; + + ipaddr.IPv4 = (function() { + function IPv4(octets) { + var k, len, octet; + if (octets.length !== 4) { + throw new Error("ipaddr: ipv4 octet count should be 4"); + } + for (k = 0, len = octets.length; k < len; k++) { + octet = octets[k]; + if (!((0 <= octet && octet <= 255))) { + throw new Error("ipaddr: ipv4 octet should fit in 8 bits"); + } + } + this.octets = octets; + } + + IPv4.prototype.kind = function() { + return 'ipv4'; + }; + + IPv4.prototype.toString = function() { + return this.octets.join("."); + }; + + IPv4.prototype.toNormalizedString = function() { + return this.toString(); + }; + + IPv4.prototype.toByteArray = function() { + return this.octets.slice(0); + }; + + IPv4.prototype.match = function(other, cidrRange) { + var ref; + if (cidrRange === void 0) { + ref = other, other = ref[0], cidrRange = ref[1]; + } + if (other.kind() !== 'ipv4') { + throw new Error("ipaddr: cannot match ipv4 address with non-ipv4 one"); + } + return matchCIDR(this.octets, other.octets, 8, cidrRange); + }; + + IPv4.prototype.SpecialRanges = { + unspecified: [[new IPv4([0, 0, 0, 0]), 8]], + broadcast: [[new IPv4([255, 255, 255, 255]), 32]], + multicast: [[new IPv4([224, 0, 0, 0]), 4]], + linkLocal: [[new IPv4([169, 254, 0, 0]), 16]], + loopback: [[new IPv4([127, 0, 0, 0]), 8]], + carrierGradeNat: [[new IPv4([100, 64, 0, 0]), 10]], + "private": [[new IPv4([10, 0, 0, 0]), 8], [new IPv4([172, 16, 0, 0]), 12], [new IPv4([192, 168, 0, 0]), 16]], + reserved: [[new IPv4([192, 0, 0, 0]), 24], [new IPv4([192, 0, 2, 0]), 24], [new IPv4([192, 88, 99, 0]), 24], [new IPv4([198, 51, 100, 0]), 24], [new IPv4([203, 0, 113, 0]), 24], [new IPv4([240, 0, 0, 0]), 4]] + }; + + IPv4.prototype.range = function() { + return ipaddr.subnetMatch(this, this.SpecialRanges); + }; + + IPv4.prototype.toIPv4MappedAddress = function() { + return ipaddr.IPv6.parse("::ffff:" + (this.toString())); + }; + + IPv4.prototype.prefixLengthFromSubnetMask = function() { + var cidr, i, k, octet, stop, zeros, zerotable; + zerotable = { + 0: 8, + 128: 7, + 192: 6, + 224: 5, + 240: 4, + 248: 3, + 252: 2, + 254: 1, + 255: 0 + }; + cidr = 0; + stop = false; + for (i = k = 3; k >= 0; i = k += -1) { + octet = this.octets[i]; + if (octet in zerotable) { + zeros = zerotable[octet]; + if (stop && zeros !== 0) { + return null; + } + if (zeros !== 8) { + stop = true; + } + cidr += zeros; + } else { + return null; + } + } + return 32 - cidr; + }; + + return IPv4; + + })(); + + ipv4Part = "(0?\\d+|0x[a-f0-9]+)"; + + ipv4Regexes = { + fourOctet: new RegExp("^" + ipv4Part + "\\." + ipv4Part + "\\." + ipv4Part + "\\." 
+ ipv4Part + "$", 'i'), + longValue: new RegExp("^" + ipv4Part + "$", 'i') + }; + + ipaddr.IPv4.parser = function(string) { + var match, parseIntAuto, part, shift, value; + parseIntAuto = function(string) { + if (string[0] === "0" && string[1] !== "x") { + return parseInt(string, 8); + } else { + return parseInt(string); + } + }; + if (match = string.match(ipv4Regexes.fourOctet)) { + return (function() { + var k, len, ref, results; + ref = match.slice(1, 6); + results = []; + for (k = 0, len = ref.length; k < len; k++) { + part = ref[k]; + results.push(parseIntAuto(part)); + } + return results; + })(); + } else if (match = string.match(ipv4Regexes.longValue)) { + value = parseIntAuto(match[1]); + if (value > 0xffffffff || value < 0) { + throw new Error("ipaddr: address outside defined range"); + } + return ((function() { + var k, results; + results = []; + for (shift = k = 0; k <= 24; shift = k += 8) { + results.push((value >> shift) & 0xff); + } + return results; + })()).reverse(); + } else { + return null; + } + }; + + ipaddr.IPv6 = (function() { + function IPv6(parts, zoneId) { + var i, k, l, len, part, ref; + if (parts.length === 16) { + this.parts = []; + for (i = k = 0; k <= 14; i = k += 2) { + this.parts.push((parts[i] << 8) | parts[i + 1]); + } + } else if (parts.length === 8) { + this.parts = parts; + } else { + throw new Error("ipaddr: ipv6 part count should be 8 or 16"); + } + ref = this.parts; + for (l = 0, len = ref.length; l < len; l++) { + part = ref[l]; + if (!((0 <= part && part <= 0xffff))) { + throw new Error("ipaddr: ipv6 part should fit in 16 bits"); + } + } + if (zoneId) { + this.zoneId = zoneId; + } + } + + IPv6.prototype.kind = function() { + return 'ipv6'; + }; + + IPv6.prototype.toString = function() { + return this.toNormalizedString().replace(/((^|:)(0(:|$))+)/, '::'); + }; + + IPv6.prototype.toRFC5952String = function() { + var bestMatchIndex, bestMatchLength, match, regex, string; + regex = /((^|:)(0(:|$)){2,})/g; + string = this.toNormalizedString(); + bestMatchIndex = 0; + bestMatchLength = -1; + while ((match = regex.exec(string))) { + if (match[0].length > bestMatchLength) { + bestMatchIndex = match.index; + bestMatchLength = match[0].length; + } + } + if (bestMatchLength < 0) { + return string; + } + return string.substring(0, bestMatchIndex) + '::' + string.substring(bestMatchIndex + bestMatchLength); + }; + + IPv6.prototype.toByteArray = function() { + var bytes, k, len, part, ref; + bytes = []; + ref = this.parts; + for (k = 0, len = ref.length; k < len; k++) { + part = ref[k]; + bytes.push(part >> 8); + bytes.push(part & 0xff); + } + return bytes; + }; + + IPv6.prototype.toNormalizedString = function() { + var addr, part, suffix; + addr = ((function() { + var k, len, ref, results; + ref = this.parts; + results = []; + for (k = 0, len = ref.length; k < len; k++) { + part = ref[k]; + results.push(part.toString(16)); + } + return results; + }).call(this)).join(":"); + suffix = ''; + if (this.zoneId) { + suffix = '%' + this.zoneId; + } + return addr + suffix; + }; + + IPv6.prototype.toFixedLengthString = function() { + var addr, part, suffix; + addr = ((function() { + var k, len, ref, results; + ref = this.parts; + results = []; + for (k = 0, len = ref.length; k < len; k++) { + part = ref[k]; + results.push(part.toString(16).padStart(4, '0')); + } + return results; + }).call(this)).join(":"); + suffix = ''; + if (this.zoneId) { + suffix = '%' + this.zoneId; + } + return addr + suffix; + }; + + IPv6.prototype.match = function(other, cidrRange) { + var ref; 
+ if (cidrRange === void 0) { + ref = other, other = ref[0], cidrRange = ref[1]; + } + if (other.kind() !== 'ipv6') { + throw new Error("ipaddr: cannot match ipv6 address with non-ipv6 one"); + } + return matchCIDR(this.parts, other.parts, 16, cidrRange); + }; + + IPv6.prototype.SpecialRanges = { + unspecified: [new IPv6([0, 0, 0, 0, 0, 0, 0, 0]), 128], + linkLocal: [new IPv6([0xfe80, 0, 0, 0, 0, 0, 0, 0]), 10], + multicast: [new IPv6([0xff00, 0, 0, 0, 0, 0, 0, 0]), 8], + loopback: [new IPv6([0, 0, 0, 0, 0, 0, 0, 1]), 128], + uniqueLocal: [new IPv6([0xfc00, 0, 0, 0, 0, 0, 0, 0]), 7], + ipv4Mapped: [new IPv6([0, 0, 0, 0, 0, 0xffff, 0, 0]), 96], + rfc6145: [new IPv6([0, 0, 0, 0, 0xffff, 0, 0, 0]), 96], + rfc6052: [new IPv6([0x64, 0xff9b, 0, 0, 0, 0, 0, 0]), 96], + '6to4': [new IPv6([0x2002, 0, 0, 0, 0, 0, 0, 0]), 16], + teredo: [new IPv6([0x2001, 0, 0, 0, 0, 0, 0, 0]), 32], + reserved: [[new IPv6([0x2001, 0xdb8, 0, 0, 0, 0, 0, 0]), 32]] + }; + + IPv6.prototype.range = function() { + return ipaddr.subnetMatch(this, this.SpecialRanges); + }; + + IPv6.prototype.isIPv4MappedAddress = function() { + return this.range() === 'ipv4Mapped'; + }; + + IPv6.prototype.toIPv4Address = function() { + var high, low, ref; + if (!this.isIPv4MappedAddress()) { + throw new Error("ipaddr: trying to convert a generic ipv6 address to ipv4"); + } + ref = this.parts.slice(-2), high = ref[0], low = ref[1]; + return new ipaddr.IPv4([high >> 8, high & 0xff, low >> 8, low & 0xff]); + }; + + IPv6.prototype.prefixLengthFromSubnetMask = function() { + var cidr, i, k, part, stop, zeros, zerotable; + zerotable = { + 0: 16, + 32768: 15, + 49152: 14, + 57344: 13, + 61440: 12, + 63488: 11, + 64512: 10, + 65024: 9, + 65280: 8, + 65408: 7, + 65472: 6, + 65504: 5, + 65520: 4, + 65528: 3, + 65532: 2, + 65534: 1, + 65535: 0 + }; + cidr = 0; + stop = false; + for (i = k = 7; k >= 0; i = k += -1) { + part = this.parts[i]; + if (part in zerotable) { + zeros = zerotable[part]; + if (stop && zeros !== 0) { + return null; + } + if (zeros !== 16) { + stop = true; + } + cidr += zeros; + } else { + return null; + } + } + return 128 - cidr; + }; + + return IPv6; + + })(); + + ipv6Part = "(?:[0-9a-f]+::?)+"; + + zoneIndex = "%[0-9a-z]{1,}"; + + ipv6Regexes = { + zoneIndex: new RegExp(zoneIndex, 'i'), + "native": new RegExp("^(::)?(" + ipv6Part + ")?([0-9a-f]+)?(::)?(" + zoneIndex + ")?$", 'i'), + transitional: new RegExp(("^((?:" + ipv6Part + ")|(?:::)(?:" + ipv6Part + ")?)") + (ipv4Part + "\\." + ipv4Part + "\\." + ipv4Part + "\\." 
+ ipv4Part) + ("(" + zoneIndex + ")?$"), 'i') + }; + + expandIPv6 = function(string, parts) { + var colonCount, lastColon, part, replacement, replacementCount, zoneId; + if (string.indexOf('::') !== string.lastIndexOf('::')) { + return null; + } + zoneId = (string.match(ipv6Regexes['zoneIndex']) || [])[0]; + if (zoneId) { + zoneId = zoneId.substring(1); + string = string.replace(/%.+$/, ''); + } + colonCount = 0; + lastColon = -1; + while ((lastColon = string.indexOf(':', lastColon + 1)) >= 0) { + colonCount++; + } + if (string.substr(0, 2) === '::') { + colonCount--; + } + if (string.substr(-2, 2) === '::') { + colonCount--; + } + if (colonCount > parts) { + return null; + } + replacementCount = parts - colonCount; + replacement = ':'; + while (replacementCount--) { + replacement += '0:'; + } + string = string.replace('::', replacement); + if (string[0] === ':') { + string = string.slice(1); + } + if (string[string.length - 1] === ':') { + string = string.slice(0, -1); + } + parts = (function() { + var k, len, ref, results; + ref = string.split(":"); + results = []; + for (k = 0, len = ref.length; k < len; k++) { + part = ref[k]; + results.push(parseInt(part, 16)); + } + return results; + })(); + return { + parts: parts, + zoneId: zoneId + }; + }; + + ipaddr.IPv6.parser = function(string) { + var addr, k, len, match, octet, octets, zoneId; + if (ipv6Regexes['native'].test(string)) { + return expandIPv6(string, 8); + } else if (match = string.match(ipv6Regexes['transitional'])) { + zoneId = match[6] || ''; + addr = expandIPv6(match[1].slice(0, -1) + zoneId, 6); + if (addr.parts) { + octets = [parseInt(match[2]), parseInt(match[3]), parseInt(match[4]), parseInt(match[5])]; + for (k = 0, len = octets.length; k < len; k++) { + octet = octets[k]; + if (!((0 <= octet && octet <= 255))) { + return null; + } + } + addr.parts.push(octets[0] << 8 | octets[1]); + addr.parts.push(octets[2] << 8 | octets[3]); + return { + parts: addr.parts, + zoneId: addr.zoneId + }; + } + } + return null; + }; + + ipaddr.IPv4.isIPv4 = ipaddr.IPv6.isIPv6 = function(string) { + return this.parser(string) !== null; + }; + + ipaddr.IPv4.isValid = function(string) { + var e; + try { + new this(this.parser(string)); + return true; + } catch (error1) { + e = error1; + return false; + } + }; + + ipaddr.IPv4.isValidFourPartDecimal = function(string) { + if (ipaddr.IPv4.isValid(string) && string.match(/^(0|[1-9]\d*)(\.(0|[1-9]\d*)){3}$/)) { + return true; + } else { + return false; + } + }; + + ipaddr.IPv6.isValid = function(string) { + var addr, e; + if (typeof string === "string" && string.indexOf(":") === -1) { + return false; + } + try { + addr = this.parser(string); + new this(addr.parts, addr.zoneId); + return true; + } catch (error1) { + e = error1; + return false; + } + }; + + ipaddr.IPv4.parse = function(string) { + var parts; + parts = this.parser(string); + if (parts === null) { + throw new Error("ipaddr: string is not formatted like ip address"); + } + return new this(parts); + }; + + ipaddr.IPv6.parse = function(string) { + var addr; + addr = this.parser(string); + if (addr.parts === null) { + throw new Error("ipaddr: string is not formatted like ip address"); + } + return new this(addr.parts, addr.zoneId); + }; + + ipaddr.IPv4.parseCIDR = function(string) { + var maskLength, match, parsed; + if (match = string.match(/^(.+)\/(\d+)$/)) { + maskLength = parseInt(match[2]); + if (maskLength >= 0 && maskLength <= 32) { + parsed = [this.parse(match[1]), maskLength]; + Object.defineProperty(parsed, 'toString', { + 
value: function() { + return this.join('/'); + } + }); + return parsed; + } + } + throw new Error("ipaddr: string is not formatted like an IPv4 CIDR range"); + }; + + ipaddr.IPv4.subnetMaskFromPrefixLength = function(prefix) { + var filledOctetCount, j, octets; + prefix = parseInt(prefix); + if (prefix < 0 || prefix > 32) { + throw new Error('ipaddr: invalid IPv4 prefix length'); + } + octets = [0, 0, 0, 0]; + j = 0; + filledOctetCount = Math.floor(prefix / 8); + while (j < filledOctetCount) { + octets[j] = 255; + j++; + } + if (filledOctetCount < 4) { + octets[filledOctetCount] = Math.pow(2, prefix % 8) - 1 << 8 - (prefix % 8); + } + return new this(octets); + }; + + ipaddr.IPv4.broadcastAddressFromCIDR = function(string) { + var cidr, error, i, ipInterfaceOctets, octets, subnetMaskOctets; + try { + cidr = this.parseCIDR(string); + ipInterfaceOctets = cidr[0].toByteArray(); + subnetMaskOctets = this.subnetMaskFromPrefixLength(cidr[1]).toByteArray(); + octets = []; + i = 0; + while (i < 4) { + octets.push(parseInt(ipInterfaceOctets[i], 10) | parseInt(subnetMaskOctets[i], 10) ^ 255); + i++; + } + return new this(octets); + } catch (error1) { + error = error1; + throw new Error('ipaddr: the address does not have IPv4 CIDR format'); + } + }; + + ipaddr.IPv4.networkAddressFromCIDR = function(string) { + var cidr, error, i, ipInterfaceOctets, octets, subnetMaskOctets; + try { + cidr = this.parseCIDR(string); + ipInterfaceOctets = cidr[0].toByteArray(); + subnetMaskOctets = this.subnetMaskFromPrefixLength(cidr[1]).toByteArray(); + octets = []; + i = 0; + while (i < 4) { + octets.push(parseInt(ipInterfaceOctets[i], 10) & parseInt(subnetMaskOctets[i], 10)); + i++; + } + return new this(octets); + } catch (error1) { + error = error1; + throw new Error('ipaddr: the address does not have IPv4 CIDR format'); + } + }; + + ipaddr.IPv6.parseCIDR = function(string) { + var maskLength, match, parsed; + if (match = string.match(/^(.+)\/(\d+)$/)) { + maskLength = parseInt(match[2]); + if (maskLength >= 0 && maskLength <= 128) { + parsed = [this.parse(match[1]), maskLength]; + Object.defineProperty(parsed, 'toString', { + value: function() { + return this.join('/'); + } + }); + return parsed; + } + } + throw new Error("ipaddr: string is not formatted like an IPv6 CIDR range"); + }; + + ipaddr.isValid = function(string) { + return ipaddr.IPv6.isValid(string) || ipaddr.IPv4.isValid(string); + }; + + ipaddr.parse = function(string) { + if (ipaddr.IPv6.isValid(string)) { + return ipaddr.IPv6.parse(string); + } else if (ipaddr.IPv4.isValid(string)) { + return ipaddr.IPv4.parse(string); + } else { + throw new Error("ipaddr: the address has neither IPv6 nor IPv4 format"); + } + }; + + ipaddr.parseCIDR = function(string) { + var e; + try { + return ipaddr.IPv6.parseCIDR(string); + } catch (error1) { + e = error1; + try { + return ipaddr.IPv4.parseCIDR(string); + } catch (error1) { + e = error1; + throw new Error("ipaddr: the address has neither IPv6 nor IPv4 CIDR format"); + } + } + }; + + ipaddr.fromByteArray = function(bytes) { + var length; + length = bytes.length; + if (length === 4) { + return new ipaddr.IPv4(bytes); + } else if (length === 16) { + return new ipaddr.IPv6(bytes); + } else { + throw new Error("ipaddr: the binary input is neither an IPv6 nor IPv4 address"); + } + }; + + ipaddr.process = function(string) { + var addr; + addr = this.parse(string); + if (addr.kind() === 'ipv6' && addr.isIPv4MappedAddress()) { + return addr.toIPv4Address(); + } else { + return addr; + } + }; + +}).call(this); diff 
--git a/node_modules/ipaddr.js/lib/ipaddr.js.d.ts b/node_modules/ipaddr.js/lib/ipaddr.js.d.ts new file mode 100644 index 00000000..ef01ac4f --- /dev/null +++ b/node_modules/ipaddr.js/lib/ipaddr.js.d.ts @@ -0,0 +1,71 @@ + + +declare module "ipaddr.js" { + + type IPv4Range = 'unicast' | 'unspecified' | 'broadcast' | 'multicast' | 'linkLocal' | 'loopback' | 'carrierGradeNat' | 'private' | 'reserved'; + type IPv6Range = 'unicast' | 'unspecified' | 'linkLocal' | 'multicast' | 'loopback' | 'uniqueLocal' | 'ipv4Mapped' | 'rfc6145' | 'rfc6052' | '6to4' | 'teredo' | 'reserved'; + + interface RangeList { + [name: string]: [T, number] | [T, number][]; + } + + + // Common methods/properties for IPv4 and IPv6 classes. + class IP { + + prefixLengthFromSubnetMask(): number | false; + toByteArray(): number[]; + toNormalizedString(): string; + toString(): string; + } + + namespace Address { + + export function isValid(addr: string): boolean; + export function fromByteArray(bytes: number[]): IPv4 | IPv6; + export function parse(addr: string): IPv4 | IPv6; + export function parseCIDR(mask: string): [IPv4 | IPv6, number]; + export function process(address: string): IPv4 | IPv6; + export function subnetMatch(addr: IPv4, rangeList: RangeList, defaultName?: string): string; + export function subnetMatch(addr: IPv6, rangeList: RangeList, defaultName?: string): string; + + export class IPv4 extends IP { + static broadcastAddressFromCIDR(addr: string): IPv4; + static isIPv4(addr: string): boolean; + static isValidFourPartDecimal(addr: string): boolean; + static isValid(addr: string): boolean; + static networkAddressFromCIDR(addr: string): IPv4; + static parse(addr: string): IPv4; + static parseCIDR(addr: string): [IPv4, number]; + static subnetMaskFromPrefixLength(prefix: number): IPv4; + constructor(octets: number[]); + + kind(): 'ipv4'; + match(addr: IPv4, bits: number): boolean; + match(mask: [IPv4, number]): boolean; + range(): IPv4Range; + subnetMatch(rangeList: RangeList, defaultName?: string): string; + toIPv4MappedAddress(): IPv6; + } + + export class IPv6 extends IP { + static broadcastAddressFromCIDR(addr: string): IPv6; + static isIPv6(addr: string): boolean; + static isValid(addr: string): boolean; + static parse(addr: string): IPv6; + static parseCIDR(addr: string): [IPv6, number]; + static subnetMaskFromPrefixLength(prefix: number): IPv6; + constructor(octets: number[]); + + isIPv4MappedAddress(): boolean; + kind(): 'ipv6'; + match(addr: IPv6, bits: number): boolean; + match(mask: [IPv6, number]): boolean; + range(): IPv6Range; + subnetMatch(rangeList: RangeList, defaultName?: string): string; + toIPv4Address(): IPv4; + } + } + + export = Address; +} diff --git a/node_modules/ipaddr.js/package.json b/node_modules/ipaddr.js/package.json new file mode 100644 index 00000000..7e092f17 --- /dev/null +++ b/node_modules/ipaddr.js/package.json @@ -0,0 +1,34 @@ +{ + "name": "ipaddr.js", + "description": "A library for manipulating IPv4 and IPv6 addresses in JavaScript.", + "version": "1.9.0", + "author": "whitequark ", + "directories": { + "lib": "./lib" + }, + "dependencies": {}, + "devDependencies": { + "coffee-script": "~1.12.6", + "uglify-js": "~3.0.19", + "nodeunit": ">=0.8.2 <0.8.7" + }, + "scripts": { + "test": "cake build test" + }, + "files": [ + "lib/", + "ipaddr.min.js" + ], + "keywords": [ + "ip", + "ipv4", + "ipv6" + ], + "repository": "git://github.com/whitequark/ipaddr.js", + "main": "./lib/ipaddr.js", + "engines": { + "node": ">= 0.10" + }, + "license": "MIT", + "types": "./lib/ipaddr.js.d.ts" 
+} diff --git a/node_modules/is-accessor-descriptor/LICENSE b/node_modules/is-accessor-descriptor/LICENSE new file mode 100644 index 00000000..e33d14b7 --- /dev/null +++ b/node_modules/is-accessor-descriptor/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-accessor-descriptor/README.md b/node_modules/is-accessor-descriptor/README.md new file mode 100644 index 00000000..d198e1f0 --- /dev/null +++ b/node_modules/is-accessor-descriptor/README.md @@ -0,0 +1,144 @@ +# is-accessor-descriptor [![NPM version](https://img.shields.io/npm/v/is-accessor-descriptor.svg?style=flat)](https://www.npmjs.com/package/is-accessor-descriptor) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-accessor-descriptor.svg?style=flat)](https://npmjs.org/package/is-accessor-descriptor) [![NPM total downloads](https://img.shields.io/npm/dt/is-accessor-descriptor.svg?style=flat)](https://npmjs.org/package/is-accessor-descriptor) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-accessor-descriptor.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-accessor-descriptor) + +> Returns true if a value has the characteristics of a valid JavaScript accessor descriptor. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. 
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-accessor-descriptor +``` + +## Usage + +```js +var isAccessor = require('is-accessor-descriptor'); + +isAccessor({get: function() {}}); +//=> true +``` + +You may also pass an object and property name to check if the property is an accessor: + +```js +isAccessor(foo, 'bar'); +``` + +## Examples + +`false` when not an object + +```js +isAccessor('a') +isAccessor(null) +isAccessor([]) +//=> false +``` + +`true` when the object has valid properties + +and the properties all have the correct JavaScript types: + +```js +isAccessor({get: noop, set: noop}) +isAccessor({get: noop}) +isAccessor({set: noop}) +//=> true +``` + +`false` when the object has invalid properties + +```js +isAccessor({get: noop, set: noop, bar: 'baz'}) +isAccessor({get: noop, writable: true}) +isAccessor({get: noop, value: true}) +//=> false +``` + +`false` when an accessor is not a function + +```js +isAccessor({get: noop, set: 'baz'}) +isAccessor({get: 'foo', set: noop}) +isAccessor({get: 'foo', bar: 'baz'}) +isAccessor({get: 'foo', set: 'baz'}) +//=> false +``` + +`false` when a value is not the correct type + +```js +isAccessor({get: noop, set: noop, enumerable: 'foo'}) +isAccessor({set: noop, configurable: 'foo'}) +isAccessor({get: noop, configurable: 'foo'}) +//=> false +``` + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [is-accessor-descriptor](https://www.npmjs.com/package/is-accessor-descriptor): Returns true if a value has the characteristics of a valid JavaScript accessor descriptor. | [homepage](https://github.com/jonschlinkert/is-accessor-descriptor "Returns true if a value has the characteristics of a valid JavaScript accessor descriptor.") +* [is-data-descriptor](https://www.npmjs.com/package/is-data-descriptor): Returns true if a value has the characteristics of a valid JavaScript data descriptor. | [homepage](https://github.com/jonschlinkert/is-data-descriptor "Returns true if a value has the characteristics of a valid JavaScript data descriptor.") +* [is-descriptor](https://www.npmjs.com/package/is-descriptor): Returns true if a value has the characteristics of a valid JavaScript descriptor. Works for… [more](https://github.com/jonschlinkert/is-descriptor) | [homepage](https://github.com/jonschlinkert/is-descriptor "Returns true if a value has the characteristics of a valid JavaScript descriptor. Works for data descriptors and accessor descriptors.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 22 | [jonschlinkert](https://github.com/jonschlinkert) | +| 2 | [realityking](https://github.com/realityking) | + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on November 01, 2017._ \ No newline at end of file diff --git a/node_modules/is-accessor-descriptor/index.js b/node_modules/is-accessor-descriptor/index.js new file mode 100644 index 00000000..d2e6fe8b --- /dev/null +++ b/node_modules/is-accessor-descriptor/index.js @@ -0,0 +1,69 @@ +/*! + * is-accessor-descriptor + * + * Copyright (c) 2015-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +var typeOf = require('kind-of'); + +// accessor descriptor properties +var accessor = { + get: 'function', + set: 'function', + configurable: 'boolean', + enumerable: 'boolean' +}; + +function isAccessorDescriptor(obj, prop) { + if (typeof prop === 'string') { + var val = Object.getOwnPropertyDescriptor(obj, prop); + return typeof val !== 'undefined'; + } + + if (typeOf(obj) !== 'object') { + return false; + } + + if (has(obj, 'value') || has(obj, 'writable')) { + return false; + } + + if (!has(obj, 'get') || typeof obj.get !== 'function') { + return false; + } + + // tldr: it's valid to have "set" be undefined + // "set" might be undefined if `Object.getOwnPropertyDescriptor` + // was used to get the value, and only `get` was defined by the user + if (has(obj, 'set') && typeof obj[key] !== 'function' && typeof obj[key] !== 'undefined') { + return false; + } + + for (var key in obj) { + if (!accessor.hasOwnProperty(key)) { + continue; + } + + if (typeOf(obj[key]) === accessor[key]) { + continue; + } + + if (typeof obj[key] !== 'undefined') { + return false; + } + } + return true; +} + +function has(obj, key) { + return {}.hasOwnProperty.call(obj, key); +} + +/** + * Expose `isAccessorDescriptor` + */ + +module.exports = isAccessorDescriptor; diff --git a/node_modules/is-accessor-descriptor/node_modules/kind-of/CHANGELOG.md b/node_modules/is-accessor-descriptor/node_modules/kind-of/CHANGELOG.md new file mode 100644 index 00000000..fb30b06d --- /dev/null +++ b/node_modules/is-accessor-descriptor/node_modules/kind-of/CHANGELOG.md @@ -0,0 +1,157 @@ +# Release history + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+ Guiding Principles + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each versions is displayed. +- Mention whether you follow Semantic Versioning. + +
+ +
+ Types of changes + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/)_): + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities. + +
+ +## [6.0.0] - 2017-10-13 + +- refactor code to be more performant +- refactor benchmarks + +## [5.1.0] - 2017-10-13 + +**Added** + +- Merge pull request #15 from aretecode/patch-1 +- adds support and tests for string & array iterators + +**Changed** + +- updates benchmarks + +## [5.0.2] - 2017-08-02 + +- Merge pull request #14 from struct78/master +- Added `undefined` check + +## [5.0.0] - 2017-06-21 + +- Merge pull request #12 from aretecode/iterator +- Set Iterator + Map Iterator +- streamline `isbuffer`, minor edits + +## [4.0.0] - 2017-05-19 + +- Merge pull request #8 from tunnckoCore/master +- update deps + +## [3.2.2] - 2017-05-16 + +- fix version + +## [3.2.1] - 2017-05-16 + +- add browserify + +## [3.2.0] - 2017-04-25 + +- Merge pull request #10 from ksheedlo/unrequire-buffer +- add `promise` support and tests +- Remove unnecessary `Buffer` check + +## [3.1.0] - 2016-12-07 + +- Merge pull request #7 from laggingreflex/err +- add support for `error` and tests +- run update + +## [3.0.4] - 2016-07-29 + +- move tests +- run update + +## [3.0.3] - 2016-05-03 + +- fix prepublish script +- remove unused dep + +## [3.0.0] - 2015-11-17 + +- add typed array support +- Merge pull request #5 from miguelmota/typed-arrays +- adds new tests + +## [2.0.1] - 2015-08-21 + +- use `is-buffer` module + +## [2.0.0] - 2015-05-31 + +- Create fallback for `Array.isArray` if used as a browser package +- Merge pull request #2 from dtothefp/patch-1 +- Merge pull request #3 from pdehaan/patch-1 +- Merge branch 'master' of https://github.com/chorks/kind-of into chorks-master +- optimizations, mostly date and regex + +## [1.1.0] - 2015-02-09 + +- adds `buffer` support +- adds tests for `buffer` + +## [1.0.0] - 2015-01-19 + +- update benchmarks +- optimizations based on benchmarks + +## [0.1.2] - 2014-10-26 + +- return `typeof` value if it's not an object. 
very slight speed improvement +- use `.slice` +- adds benchmarks + +## [0.1.0] - 2014-9-26 + +- first commit + +[6.0.0]: https://github.com/jonschlinkert/kind-of/compare/5.1.0...6.0.0 +[5.1.0]: https://github.com/jonschlinkert/kind-of/compare/5.0.2...5.1.0 +[5.0.2]: https://github.com/jonschlinkert/kind-of/compare/5.0.1...5.0.2 +[5.0.1]: https://github.com/jonschlinkert/kind-of/compare/5.0.0...5.0.1 +[5.0.0]: https://github.com/jonschlinkert/kind-of/compare/4.0.0...5.0.0 +[4.0.0]: https://github.com/jonschlinkert/kind-of/compare/3.2.2...4.0.0 +[3.2.2]: https://github.com/jonschlinkert/kind-of/compare/3.2.1...3.2.2 +[3.2.1]: https://github.com/jonschlinkert/kind-of/compare/3.2.0...3.2.1 +[3.2.0]: https://github.com/jonschlinkert/kind-of/compare/3.1.0...3.2.0 +[3.1.0]: https://github.com/jonschlinkert/kind-of/compare/3.0.4...3.1.0 +[3.0.4]: https://github.com/jonschlinkert/kind-of/compare/3.0.3...3.0.4 +[3.0.3]: https://github.com/jonschlinkert/kind-of/compare/3.0.0...3.0.3 +[3.0.0]: https://github.com/jonschlinkert/kind-of/compare/2.0.1...3.0.0 +[2.0.1]: https://github.com/jonschlinkert/kind-of/compare/2.0.0...2.0.1 +[2.0.0]: https://github.com/jonschlinkert/kind-of/compare/1.1.0...2.0.0 +[1.1.0]: https://github.com/jonschlinkert/kind-of/compare/1.0.0...1.1.0 +[1.0.0]: https://github.com/jonschlinkert/kind-of/compare/0.1.2...1.0.0 +[0.1.2]: https://github.com/jonschlinkert/kind-of/compare/0.1.0...0.1.2 +[0.1.0]: https://github.com/jonschlinkert/kind-of/commit/2fae09b0b19b1aadb558e9be39f0c3ef6034eb87 + +[Unreleased]: https://github.com/jonschlinkert/kind-of/compare/0.1.2...HEAD +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog + diff --git a/node_modules/is-accessor-descriptor/node_modules/kind-of/LICENSE b/node_modules/is-accessor-descriptor/node_modules/kind-of/LICENSE new file mode 100644 index 00000000..3f2eca18 --- /dev/null +++ b/node_modules/is-accessor-descriptor/node_modules/kind-of/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-accessor-descriptor/node_modules/kind-of/README.md b/node_modules/is-accessor-descriptor/node_modules/kind-of/README.md new file mode 100644 index 00000000..4b0d4a81 --- /dev/null +++ b/node_modules/is-accessor-descriptor/node_modules/kind-of/README.md @@ -0,0 +1,365 @@ +# kind-of [![NPM version](https://img.shields.io/npm/v/kind-of.svg?style=flat)](https://www.npmjs.com/package/kind-of) [![NPM monthly downloads](https://img.shields.io/npm/dm/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![NPM total downloads](https://img.shields.io/npm/dt/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/kind-of.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/kind-of) + +> Get the native type of a value. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save kind-of +``` + +Install with [bower](https://bower.io/) + +```sh +$ bower install kind-of --save +``` + +## Why use this? + +1. [it's fast](#benchmarks) | [optimizations](#optimizations) +2. [better type checking](#better-type-checking) + +## Usage + +> es5, es6, and browser ready + +```js +var kindOf = require('kind-of'); + +kindOf(undefined); +//=> 'undefined' + +kindOf(null); +//=> 'null' + +kindOf(true); +//=> 'boolean' + +kindOf(false); +//=> 'boolean' + +kindOf(new Buffer('')); +//=> 'buffer' + +kindOf(42); +//=> 'number' + +kindOf('str'); +//=> 'string' + +kindOf(arguments); +//=> 'arguments' + +kindOf({}); +//=> 'object' + +kindOf(Object.create(null)); +//=> 'object' + +kindOf(new Test()); +//=> 'object' + +kindOf(new Date()); +//=> 'date' + +kindOf([1, 2, 3]); +//=> 'array' + +kindOf(/foo/); +//=> 'regexp' + +kindOf(new RegExp('foo')); +//=> 'regexp' + +kindOf(new Error('error')); +//=> 'error' + +kindOf(function () {}); +//=> 'function' + +kindOf(function * () {}); +//=> 'generatorfunction' + +kindOf(Symbol('str')); +//=> 'symbol' + +kindOf(new Map()); +//=> 'map' + +kindOf(new WeakMap()); +//=> 'weakmap' + +kindOf(new Set()); +//=> 'set' + +kindOf(new WeakSet()); +//=> 'weakset' + +kindOf(new Int8Array()); +//=> 'int8array' + +kindOf(new Uint8Array()); +//=> 'uint8array' + +kindOf(new Uint8ClampedArray()); +//=> 'uint8clampedarray' + +kindOf(new Int16Array()); +//=> 'int16array' + +kindOf(new Uint16Array()); +//=> 'uint16array' + +kindOf(new Int32Array()); +//=> 'int32array' + +kindOf(new Uint32Array()); +//=> 'uint32array' + +kindOf(new Float32Array()); +//=> 'float32array' + +kindOf(new Float64Array()); +//=> 'float64array' +``` + +## Benchmarks + +Benchmarked against [typeof](http://github.com/CodingFu/typeof) and [type-of](https://github.com/ForbesLindesay/type-of). 
+ +```bash +# arguments (32 bytes) + kind-of x 17,024,098 ops/sec ±1.90% (86 runs sampled) + lib-type-of x 11,926,235 ops/sec ±1.34% (83 runs sampled) + lib-typeof x 9,245,257 ops/sec ±1.22% (87 runs sampled) + + fastest is kind-of (by 161% avg) + +# array (22 bytes) + kind-of x 17,196,492 ops/sec ±1.07% (88 runs sampled) + lib-type-of x 8,838,283 ops/sec ±1.02% (87 runs sampled) + lib-typeof x 8,677,848 ops/sec ±0.87% (87 runs sampled) + + fastest is kind-of (by 196% avg) + +# boolean (24 bytes) + kind-of x 16,841,600 ops/sec ±1.10% (86 runs sampled) + lib-type-of x 8,096,787 ops/sec ±0.95% (87 runs sampled) + lib-typeof x 8,423,345 ops/sec ±1.15% (86 runs sampled) + + fastest is kind-of (by 204% avg) + +# buffer (38 bytes) + kind-of x 14,848,060 ops/sec ±1.05% (86 runs sampled) + lib-type-of x 3,671,577 ops/sec ±1.49% (87 runs sampled) + lib-typeof x 8,360,236 ops/sec ±1.24% (86 runs sampled) + + fastest is kind-of (by 247% avg) + +# date (30 bytes) + kind-of x 16,067,761 ops/sec ±1.58% (86 runs sampled) + lib-type-of x 8,954,436 ops/sec ±1.40% (87 runs sampled) + lib-typeof x 8,488,307 ops/sec ±1.51% (84 runs sampled) + + fastest is kind-of (by 184% avg) + +# error (36 bytes) + kind-of x 9,634,090 ops/sec ±1.12% (89 runs sampled) + lib-type-of x 7,735,624 ops/sec ±1.32% (86 runs sampled) + lib-typeof x 7,442,160 ops/sec ±1.11% (90 runs sampled) + + fastest is kind-of (by 127% avg) + +# function (34 bytes) + kind-of x 10,031,494 ops/sec ±1.27% (86 runs sampled) + lib-type-of x 9,502,757 ops/sec ±1.17% (89 runs sampled) + lib-typeof x 8,278,985 ops/sec ±1.08% (88 runs sampled) + + fastest is kind-of (by 113% avg) + +# null (24 bytes) + kind-of x 18,159,808 ops/sec ±1.92% (86 runs sampled) + lib-type-of x 12,927,635 ops/sec ±1.01% (88 runs sampled) + lib-typeof x 7,958,234 ops/sec ±1.21% (89 runs sampled) + + fastest is kind-of (by 174% avg) + +# number (22 bytes) + kind-of x 17,846,779 ops/sec ±0.91% (85 runs sampled) + lib-type-of x 3,316,636 ops/sec ±1.19% (86 runs sampled) + lib-typeof x 2,329,477 ops/sec ±2.21% (85 runs sampled) + + fastest is kind-of (by 632% avg) + +# object-plain (47 bytes) + kind-of x 7,085,155 ops/sec ±1.05% (88 runs sampled) + lib-type-of x 8,870,930 ops/sec ±1.06% (83 runs sampled) + lib-typeof x 8,716,024 ops/sec ±1.05% (87 runs sampled) + + fastest is lib-type-of (by 112% avg) + +# regex (25 bytes) + kind-of x 14,196,052 ops/sec ±1.65% (84 runs sampled) + lib-type-of x 9,554,164 ops/sec ±1.25% (88 runs sampled) + lib-typeof x 8,359,691 ops/sec ±1.07% (87 runs sampled) + + fastest is kind-of (by 158% avg) + +# string (33 bytes) + kind-of x 16,131,428 ops/sec ±1.41% (85 runs sampled) + lib-type-of x 7,273,172 ops/sec ±1.05% (87 runs sampled) + lib-typeof x 7,382,635 ops/sec ±1.17% (85 runs sampled) + + fastest is kind-of (by 220% avg) + +# symbol (34 bytes) + kind-of x 17,011,537 ops/sec ±1.24% (86 runs sampled) + lib-type-of x 3,492,454 ops/sec ±1.23% (89 runs sampled) + lib-typeof x 7,471,235 ops/sec ±2.48% (87 runs sampled) + + fastest is kind-of (by 310% avg) + +# template-strings (36 bytes) + kind-of x 15,434,250 ops/sec ±1.46% (83 runs sampled) + lib-type-of x 7,157,907 ops/sec ±0.97% (87 runs sampled) + lib-typeof x 7,517,986 ops/sec ±0.92% (86 runs sampled) + + fastest is kind-of (by 210% avg) + +# undefined (29 bytes) + kind-of x 19,167,115 ops/sec ±1.71% (87 runs sampled) + lib-type-of x 15,477,740 ops/sec ±1.63% (85 runs sampled) + lib-typeof x 19,075,495 ops/sec ±1.17% (83 runs sampled) + + fastest is lib-typeof,kind-of + +``` + +## Optimizations + 
+In 7 out of 8 cases, this library is 2x-10x faster than other top libraries included in the benchmarks. There are a few things that lead to this performance advantage, none of them hard and fast rules, but all of them simple and repeatable in almost any code library: + +1. Optimize around the fastest and most common use cases first. Of course, this will change from project-to-project, but I took some time to understand how and why `typeof` checks were being used in my own libraries and other libraries I use a lot. +2. Optimize around bottlenecks - In other words, the order in which conditionals are implemented is significant, because each check is only as fast as the failing checks that came before it. Here, the biggest bottleneck by far is checking for plain objects (an object that was created by the `Object` constructor). I opted to make this check happen by process of elimination rather than brute force up front (e.g. by using something like `val.constructor.name`), so that every other type check would not be penalized it. +3. Don't do uneccessary processing - why do `.slice(8, -1).toLowerCase();` just to get the word `regex`? It's much faster to do `if (type === '[object RegExp]') return 'regex'` +4. There is no reason to make the code in a microlib as terse as possible, just to win points for making it shorter. It's always better to favor performant code over terse code. You will always only be using a single `require()` statement to use the library anyway, regardless of how the code is written. + +## Better type checking + +kind-of seems to be more consistently "correct" than other type checking libs I've looked at. For example, here are some differing results from other popular libs: + +### [typeof](https://github.com/CodingFu/typeof) lib + +Incorrectly identifies instances of custom constructors (pretty common): + +```js +var typeOf = require('typeof'); +function Test() {} +console.log(typeOf(new Test())); +//=> 'test' +``` + +Returns `object` instead of `arguments`: + +```js +function foo() { + console.log(typeOf(arguments)) //=> 'object' +} +foo(); +``` + +### [type-of](https://github.com/ForbesLindesay/type-of) lib + +Incorrectly returns `object` for generator functions, buffers, `Map`, `Set`, `WeakMap` and `WeakSet`: + +```js +function * foo() {} +console.log(typeOf(foo)); +//=> 'object' +console.log(typeOf(new Buffer(''))); +//=> 'object' +console.log(typeOf(new Map())); +//=> 'object' +console.log(typeOf(new Set())); +//=> 'object' +console.log(typeOf(new WeakMap())); +//=> 'object' +console.log(typeOf(new WeakSet())); +//=> 'object' +``` + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [is-number](https://www.npmjs.com/package/is-number): Returns true if the value is a number. comprehensive tests. | [homepage](https://github.com/jonschlinkert/is-number "Returns true if the value is a number. comprehensive tests.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 98 | [jonschlinkert](https://github.com/jonschlinkert) | +| 3 | [aretecode](https://github.com/aretecode) | +| 2 | [miguelmota](https://github.com/miguelmota) | +| 1 | [dtothefp](https://github.com/dtothefp) | +| 1 | [ianstormtaylor](https://github.com/ianstormtaylor) | +| 1 | [ksheedlo](https://github.com/ksheedlo) | +| 1 | [pdehaan](https://github.com/pdehaan) | +| 1 | [laggingreflex](https://github.com/laggingreflex) | +| 1 | [charlike-old](https://github.com/charlike-old) | + +### Author + +**Jon Schlinkert** + +* [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on December 01, 2017._ \ No newline at end of file diff --git a/node_modules/is-accessor-descriptor/node_modules/kind-of/index.js b/node_modules/is-accessor-descriptor/node_modules/kind-of/index.js new file mode 100644 index 00000000..aa2bb394 --- /dev/null +++ b/node_modules/is-accessor-descriptor/node_modules/kind-of/index.js @@ -0,0 +1,129 @@ +var toString = Object.prototype.toString; + +module.exports = function kindOf(val) { + if (val === void 0) return 'undefined'; + if (val === null) return 'null'; + + var type = typeof val; + if (type === 'boolean') return 'boolean'; + if (type === 'string') return 'string'; + if (type === 'number') return 'number'; + if (type === 'symbol') return 'symbol'; + if (type === 'function') { + return isGeneratorFn(val) ? 
'generatorfunction' : 'function'; + } + + if (isArray(val)) return 'array'; + if (isBuffer(val)) return 'buffer'; + if (isArguments(val)) return 'arguments'; + if (isDate(val)) return 'date'; + if (isError(val)) return 'error'; + if (isRegexp(val)) return 'regexp'; + + switch (ctorName(val)) { + case 'Symbol': return 'symbol'; + case 'Promise': return 'promise'; + + // Set, Map, WeakSet, WeakMap + case 'WeakMap': return 'weakmap'; + case 'WeakSet': return 'weakset'; + case 'Map': return 'map'; + case 'Set': return 'set'; + + // 8-bit typed arrays + case 'Int8Array': return 'int8array'; + case 'Uint8Array': return 'uint8array'; + case 'Uint8ClampedArray': return 'uint8clampedarray'; + + // 16-bit typed arrays + case 'Int16Array': return 'int16array'; + case 'Uint16Array': return 'uint16array'; + + // 32-bit typed arrays + case 'Int32Array': return 'int32array'; + case 'Uint32Array': return 'uint32array'; + case 'Float32Array': return 'float32array'; + case 'Float64Array': return 'float64array'; + } + + if (isGeneratorObj(val)) { + return 'generator'; + } + + // Non-plain objects + type = toString.call(val); + switch (type) { + case '[object Object]': return 'object'; + // iterators + case '[object Map Iterator]': return 'mapiterator'; + case '[object Set Iterator]': return 'setiterator'; + case '[object String Iterator]': return 'stringiterator'; + case '[object Array Iterator]': return 'arrayiterator'; + } + + // other + return type.slice(8, -1).toLowerCase().replace(/\s/g, ''); +}; + +function ctorName(val) { + return val.constructor ? val.constructor.name : null; +} + +function isArray(val) { + if (Array.isArray) return Array.isArray(val); + return val instanceof Array; +} + +function isError(val) { + return val instanceof Error || (typeof val.message === 'string' && val.constructor && typeof val.constructor.stackTraceLimit === 'number'); +} + +function isDate(val) { + if (val instanceof Date) return true; + return typeof val.toDateString === 'function' + && typeof val.getDate === 'function' + && typeof val.setDate === 'function'; +} + +function isRegexp(val) { + if (val instanceof RegExp) return true; + return typeof val.flags === 'string' + && typeof val.ignoreCase === 'boolean' + && typeof val.multiline === 'boolean' + && typeof val.global === 'boolean'; +} + +function isGeneratorFn(name, val) { + return ctorName(name) === 'GeneratorFunction'; +} + +function isGeneratorObj(val) { + return typeof val.throw === 'function' + && typeof val.return === 'function' + && typeof val.next === 'function'; +} + +function isArguments(val) { + try { + if (typeof val.length === 'number' && typeof val.callee === 'function') { + return true; + } + } catch (err) { + if (err.message.indexOf('callee') !== -1) { + return true; + } + } + return false; +} + +/** + * If you need to support Safari 5-7 (8-10 yr-old browser), + * take a look at https://github.com/feross/is-buffer + */ + +function isBuffer(val) { + if (val.constructor && typeof val.constructor.isBuffer === 'function') { + return val.constructor.isBuffer(val); + } + return false; +} diff --git a/node_modules/is-accessor-descriptor/node_modules/kind-of/package.json b/node_modules/is-accessor-descriptor/node_modules/kind-of/package.json new file mode 100644 index 00000000..73d70aee --- /dev/null +++ b/node_modules/is-accessor-descriptor/node_modules/kind-of/package.json @@ -0,0 +1,88 @@ +{ + "name": "kind-of", + "description": "Get the native type of a value.", + "version": "6.0.2", + "homepage": "https://github.com/jonschlinkert/kind-of", + 
"author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "David Fox-Powell (https://dtothefp.github.io/me)", + "James (https://twitter.com/aretecode)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Ken Sheedlo (kensheedlo.com)", + "laggingreflex (https://github.com/laggingreflex)", + "Miguel Mota (https://miguelmota.com)", + "Peter deHaan (http://about.me/peterdehaan)", + "tunnckoCore (https://i.am.charlike.online)" + ], + "repository": "jonschlinkert/kind-of", + "bugs": { + "url": "https://github.com/jonschlinkert/kind-of/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha", + "prepublish": "browserify -o browser.js -e index.js -s index --bare" + }, + "devDependencies": { + "benchmarked": "^2.0.0", + "browserify": "^14.4.0", + "gulp-format-md": "^1.0.0", + "mocha": "^4.0.1", + "write": "^1.0.3" + }, + "keywords": [ + "arguments", + "array", + "boolean", + "check", + "date", + "function", + "is", + "is-type", + "is-type-of", + "kind", + "kind-of", + "number", + "object", + "of", + "regexp", + "string", + "test", + "type", + "type-of", + "typeof", + "types" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "is-glob", + "is-number", + "is-primitive" + ] + }, + "reflinks": [ + "type-of", + "typeof", + "verb" + ] + } +} diff --git a/node_modules/is-accessor-descriptor/package.json b/node_modules/is-accessor-descriptor/package.json new file mode 100644 index 00000000..47b97ac3 --- /dev/null +++ b/node_modules/is-accessor-descriptor/package.json @@ -0,0 +1,73 @@ +{ + "name": "is-accessor-descriptor", + "description": "Returns true if a value has the characteristics of a valid JavaScript accessor descriptor.", + "version": "1.0.0", + "homepage": "https://github.com/jonschlinkert/is-accessor-descriptor", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Rouven Weßling (www.rouvenwessling.de)" + ], + "repository": "jonschlinkert/is-accessor-descriptor", + "bugs": { + "url": "https://github.com/jonschlinkert/is-accessor-descriptor/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "kind-of": "^6.0.0" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "accessor", + "check", + "data", + "descriptor", + "get", + "getter", + "is", + "keys", + "object", + "properties", + "property", + "set", + "setter", + "type", + "valid", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "is-accessor-descriptor", + "is-data-descriptor", + "is-descriptor", + "is-plain-object", + "isobject" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/is-binary-path/index.js b/node_modules/is-binary-path/index.js new file mode 100644 index 00000000..6c8c7e72 --- /dev/null +++ b/node_modules/is-binary-path/index.js @@ -0,0 +1,12 @@ +'use strict'; +var path = require('path'); +var binaryExtensions = require('binary-extensions'); +var exts = Object.create(null); + +binaryExtensions.forEach(function (el) { + exts[el] = true; +}); + 
+module.exports = function (filepath) { + return path.extname(filepath).slice(1).toLowerCase() in exts; +}; diff --git a/node_modules/is-binary-path/license b/node_modules/is-binary-path/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/is-binary-path/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-binary-path/package.json b/node_modules/is-binary-path/package.json new file mode 100644 index 00000000..cd21d885 --- /dev/null +++ b/node_modules/is-binary-path/package.json @@ -0,0 +1,39 @@ +{ + "name": "is-binary-path", + "version": "1.0.1", + "description": "Check if a filepath is a binary file", + "license": "MIT", + "repository": "sindresorhus/is-binary-path", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "bin", + "binary", + "ext", + "extensions", + "extension", + "file", + "path", + "check", + "detect", + "is" + ], + "dependencies": { + "binary-extensions": "^1.0.0" + }, + "devDependencies": { + "ava": "0.0.4" + } +} diff --git a/node_modules/is-binary-path/readme.md b/node_modules/is-binary-path/readme.md new file mode 100644 index 00000000..a17d6a24 --- /dev/null +++ b/node_modules/is-binary-path/readme.md @@ -0,0 +1,34 @@ +# is-binary-path [![Build Status](https://travis-ci.org/sindresorhus/is-binary-path.svg?branch=master)](https://travis-ci.org/sindresorhus/is-binary-path) + +> Check if a filepath is a binary file + + +## Install + +``` +$ npm install --save is-binary-path +``` + + +## Usage + +```js +var isBinaryPath = require('is-binary-path'); + +isBinaryPath('src/unicorn.png'); +//=> true + +isBinaryPath('src/unicorn.txt'); +//=> false +``` + + +## Related + +- [`binary-extensions`](https://github.com/sindresorhus/binary-extensions) - List of binary file extensions +- [`is-text-path`](https://github.com/sindresorhus/is-text-path) - Check if a filepath is a text file + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/is-buffer/LICENSE b/node_modules/is-buffer/LICENSE new file mode 100644 index 00000000..0c068cee --- /dev/null +++ b/node_modules/is-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person 
obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-buffer/README.md b/node_modules/is-buffer/README.md new file mode 100644 index 00000000..cce0a8cf --- /dev/null +++ b/node_modules/is-buffer/README.md @@ -0,0 +1,53 @@ +# is-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/is-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/is-buffer +[npm-image]: https://img.shields.io/npm/v/is-buffer.svg +[npm-url]: https://npmjs.org/package/is-buffer +[downloads-image]: https://img.shields.io/npm/dm/is-buffer.svg +[downloads-url]: https://npmjs.org/package/is-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Determine if an object is a [`Buffer`](http://nodejs.org/api/buffer.html) (including the [browserify Buffer](https://github.com/feross/buffer)) + +[![saucelabs][saucelabs-image]][saucelabs-url] + +[saucelabs-image]: https://saucelabs.com/browser-matrix/is-buffer.svg +[saucelabs-url]: https://saucelabs.com/u/is-buffer + +## Why not use `Buffer.isBuffer`? + +This module lets you check if an object is a `Buffer` without using `Buffer.isBuffer` (which includes the whole [buffer](https://github.com/feross/buffer) module in [browserify](http://browserify.org/)). + +It's future-proof and works in node too! + +## install + +```bash +npm install is-buffer +``` + +## usage + +```js +var isBuffer = require('is-buffer') + +isBuffer(new Buffer(4)) // true + +isBuffer(undefined) // false +isBuffer(null) // false +isBuffer('') // false +isBuffer(true) // false +isBuffer(false) // false +isBuffer(0) // false +isBuffer(1) // false +isBuffer(1.0) // false +isBuffer('string') // false +isBuffer({}) // false +isBuffer(function foo () {}) // false +``` + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org). diff --git a/node_modules/is-buffer/index.js b/node_modules/is-buffer/index.js new file mode 100644 index 00000000..9cce3965 --- /dev/null +++ b/node_modules/is-buffer/index.js @@ -0,0 +1,21 @@ +/*! + * Determine if an object is a Buffer + * + * @author Feross Aboukhadijeh + * @license MIT + */ + +// The _isBuffer check is for Safari 5-7 support, because it's missing +// Object.prototype.constructor. 
Remove this eventually +module.exports = function (obj) { + return obj != null && (isBuffer(obj) || isSlowBuffer(obj) || !!obj._isBuffer) +} + +function isBuffer (obj) { + return !!obj.constructor && typeof obj.constructor.isBuffer === 'function' && obj.constructor.isBuffer(obj) +} + +// For Node v0.10 support. Remove this eventually. +function isSlowBuffer (obj) { + return typeof obj.readFloatLE === 'function' && typeof obj.slice === 'function' && isBuffer(obj.slice(0, 0)) +} diff --git a/node_modules/is-buffer/package.json b/node_modules/is-buffer/package.json new file mode 100644 index 00000000..ea12137a --- /dev/null +++ b/node_modules/is-buffer/package.json @@ -0,0 +1,51 @@ +{ + "name": "is-buffer", + "description": "Determine if an object is a Buffer", + "version": "1.1.6", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org/" + }, + "bugs": { + "url": "https://github.com/feross/is-buffer/issues" + }, + "dependencies": {}, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0", + "zuul": "^3.0.0" + }, + "keywords": [ + "buffer", + "buffers", + "type", + "core buffer", + "browser buffer", + "browserify", + "typed array", + "uint32array", + "int16array", + "int32array", + "float32array", + "float64array", + "browser", + "arraybuffer", + "dataview" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/feross/is-buffer.git" + }, + "scripts": { + "test": "standard && npm run test-node && npm run test-browser", + "test-browser": "zuul -- test/*.js", + "test-browser-local": "zuul --local -- test/*.js", + "test-node": "tape test/*.js" + }, + "testling": { + "files": "test/*.js" + } +} diff --git a/node_modules/is-buffer/test/basic.js b/node_modules/is-buffer/test/basic.js new file mode 100644 index 00000000..be4f8e43 --- /dev/null +++ b/node_modules/is-buffer/test/basic.js @@ -0,0 +1,24 @@ +var isBuffer = require('../') +var test = require('tape') + +test('is-buffer', function (t) { + t.equal(isBuffer(Buffer.alloc(4)), true, 'new Buffer(4)') + t.equal(isBuffer(Buffer.allocUnsafeSlow(100)), true, 'SlowBuffer(100)') + + t.equal(isBuffer(undefined), false, 'undefined') + t.equal(isBuffer(null), false, 'null') + t.equal(isBuffer(''), false, 'empty string') + t.equal(isBuffer(true), false, 'true') + t.equal(isBuffer(false), false, 'false') + t.equal(isBuffer(0), false, '0') + t.equal(isBuffer(1), false, '1') + t.equal(isBuffer(1.0), false, '1.0') + t.equal(isBuffer('string'), false, 'string') + t.equal(isBuffer({}), false, '{}') + t.equal(isBuffer([]), false, '[]') + t.equal(isBuffer(function foo () {}), false, 'function foo () {}') + t.equal(isBuffer({ isBuffer: null }), false, '{ isBuffer: null }') + t.equal(isBuffer({ isBuffer: function () { throw new Error() } }), false, '{ isBuffer: function () { throw new Error() } }') + + t.end() +}) diff --git a/node_modules/is-ci/LICENSE b/node_modules/is-ci/LICENSE new file mode 100644 index 00000000..67846832 --- /dev/null +++ b/node_modules/is-ci/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016-2018 Thomas Watson Steen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, 
subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/is-ci/README.md b/node_modules/is-ci/README.md new file mode 100644 index 00000000..bc3840a2 --- /dev/null +++ b/node_modules/is-ci/README.md @@ -0,0 +1,50 @@ +# is-ci + +Returns `true` if the current environment is a Continuous Integration +server. + +Please [open an issue](https://github.com/watson/is-ci/issues) if your +CI server isn't properly detected :) + +[![npm](https://img.shields.io/npm/v/is-ci.svg)](https://www.npmjs.com/package/is-ci) +[![Build status](https://travis-ci.org/watson/is-ci.svg?branch=master)](https://travis-ci.org/watson/is-ci) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://github.com/feross/standard) + +## Installation + +```bash +npm install is-ci --save +``` + +## Programmatic Usage + +```js +const isCI = require('is-ci') + +if (isCI) { + console.log('The code is running on a CI server') +} +``` + +## CLI Usage + +For CLI usage you need to have the `is-ci` executable in your `PATH`. +There's a few ways to do that: + +- Either install the module globally using `npm install is-ci -g` +- Or add the module as a dependency to your app in which case it can be + used inside your package.json scripts as is +- Or provide the full path to the executable, e.g. + `./node_modules/.bin/is-ci` + +```bash +is-ci && echo "This is a CI server" +``` + +## Supported CI tools + +Refer to [ci-info](https://github.com/watson/ci-info#supported-ci-tools) docs for all supported CI's + +## License + +[MIT](LICENSE) diff --git a/node_modules/is-ci/bin.js b/node_modules/is-ci/bin.js new file mode 100755 index 00000000..0c56c01f --- /dev/null +++ b/node_modules/is-ci/bin.js @@ -0,0 +1,4 @@ +#!/usr/bin/env node +'use strict' + +process.exit(require('./') ? 
0 : 1) diff --git a/node_modules/is-ci/index.js b/node_modules/is-ci/index.js new file mode 100644 index 00000000..d4cb67aa --- /dev/null +++ b/node_modules/is-ci/index.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('ci-info').isCI diff --git a/node_modules/is-ci/package.json b/node_modules/is-ci/package.json new file mode 100644 index 00000000..2d567b01 --- /dev/null +++ b/node_modules/is-ci/package.json @@ -0,0 +1,38 @@ +{ + "name": "is-ci", + "version": "1.2.1", + "description": "Detect if the current environment is a CI server", + "bin": "bin.js", + "main": "index.js", + "dependencies": { + "ci-info": "^1.5.0" + }, + "devDependencies": { + "clear-require": "^1.0.1", + "standard": "^11.0.1" + }, + "scripts": { + "test": "standard && node test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/watson/is-ci.git" + }, + "keywords": [ + "ci", + "continuous", + "integration", + "test", + "detect" + ], + "author": "Thomas Watson Steen (https://twitter.com/wa7son)", + "license": "MIT", + "bugs": { + "url": "https://github.com/watson/is-ci/issues" + }, + "homepage": "https://github.com/watson/is-ci", + "coordinates": [ + 55.778255, + 12.593033 + ] +} diff --git a/node_modules/is-data-descriptor/LICENSE b/node_modules/is-data-descriptor/LICENSE new file mode 100644 index 00000000..e33d14b7 --- /dev/null +++ b/node_modules/is-data-descriptor/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-data-descriptor/README.md b/node_modules/is-data-descriptor/README.md new file mode 100644 index 00000000..42b07144 --- /dev/null +++ b/node_modules/is-data-descriptor/README.md @@ -0,0 +1,161 @@ +# is-data-descriptor [![NPM version](https://img.shields.io/npm/v/is-data-descriptor.svg?style=flat)](https://www.npmjs.com/package/is-data-descriptor) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-data-descriptor.svg?style=flat)](https://npmjs.org/package/is-data-descriptor) [![NPM total downloads](https://img.shields.io/npm/dt/is-data-descriptor.svg?style=flat)](https://npmjs.org/package/is-data-descriptor) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-data-descriptor.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-data-descriptor) + +> Returns true if a value has the characteristics of a valid JavaScript data descriptor. 
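+A quick way to see the distinction this module draws: a *data* descriptor stores a `value` (and optionally `writable`), while an *accessor* descriptor uses `get`/`set` instead. The sketch below is illustrative only (the object and property names are made up); it assumes the package is installed and feeds it a real descriptor taken from `Object.getOwnPropertyDescriptor`:
+
+```js
+var isDataDesc = require('is-data-descriptor');
+
+var obj = {};
+Object.defineProperty(obj, 'x', { value: 1 });
+
+// { value: 1, writable: false, enumerable: false, configurable: false }
+var desc = Object.getOwnPropertyDescriptor(obj, 'x');
+console.log(isDataDesc(desc)); //=> true
+
+// accessor descriptors have no `value`/`writable`, so they are not data descriptors
+console.log(isDataDesc({ get: function () {}, set: function () {} })); //=> false
+
+// the boolean fields must actually be booleans
+console.log(isDataDesc({ value: 1, enumerable: 'yes' })); //=> false
+```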
+ +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-data-descriptor +``` + +## Usage + +```js +var isDataDesc = require('is-data-descriptor'); +``` + +## Examples + +`true` when the descriptor has valid properties with valid values. + +```js +// `value` can be anything +isDataDesc({value: 'foo'}) +isDataDesc({value: function() {}}) +isDataDesc({value: true}) +//=> true +``` + +`false` when not an object + +```js +isDataDesc('a') +//=> false +isDataDesc(null) +//=> false +isDataDesc([]) +//=> false +``` + +`false` when the object has invalid properties + +```js +isDataDesc({value: 'foo', bar: 'baz'}) +//=> false +isDataDesc({value: 'foo', bar: 'baz'}) +//=> false +isDataDesc({value: 'foo', get: function(){}}) +//=> false +isDataDesc({get: function(){}, value: 'foo'}) +//=> false +``` + +`false` when a value is not the correct type + +```js +isDataDesc({value: 'foo', enumerable: 'foo'}) +//=> false +isDataDesc({value: 'foo', configurable: 'foo'}) +//=> false +isDataDesc({value: 'foo', writable: 'foo'}) +//=> false +``` + +## Valid properties + +The only valid data descriptor properties are the following: + +* `configurable` (required) +* `enumerable` (required) +* `value` (optional) +* `writable` (optional) + +To be a valid data descriptor, either `value` or `writable` must be defined. + +**Invalid properties** + +A descriptor may have additional _invalid_ properties (an error will **not** be thrown). + +```js +var foo = {}; + +Object.defineProperty(foo, 'bar', { + enumerable: true, + whatever: 'blah', // invalid, but doesn't cause an error + get: function() { + return 'baz'; + } +}); + +console.log(foo.bar); +//=> 'baz' +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [is-accessor-descriptor](https://www.npmjs.com/package/is-accessor-descriptor): Returns true if a value has the characteristics of a valid JavaScript accessor descriptor. | [homepage](https://github.com/jonschlinkert/is-accessor-descriptor "Returns true if a value has the characteristics of a valid JavaScript accessor descriptor.") +* [is-data-descriptor](https://www.npmjs.com/package/is-data-descriptor): Returns true if a value has the characteristics of a valid JavaScript data descriptor. | [homepage](https://github.com/jonschlinkert/is-data-descriptor "Returns true if a value has the characteristics of a valid JavaScript data descriptor.") +* [is-descriptor](https://www.npmjs.com/package/is-descriptor): Returns true if a value has the characteristics of a valid JavaScript descriptor. Works for… [more](https://github.com/jonschlinkert/is-descriptor) | [homepage](https://github.com/jonschlinkert/is-descriptor "Returns true if a value has the characteristics of a valid JavaScript descriptor. Works for data descriptors and accessor descriptors.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 21 | [jonschlinkert](https://github.com/jonschlinkert) | +| 2 | [realityking](https://github.com/realityking) | + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on November 01, 2017._ \ No newline at end of file diff --git a/node_modules/is-data-descriptor/index.js b/node_modules/is-data-descriptor/index.js new file mode 100644 index 00000000..cfeae361 --- /dev/null +++ b/node_modules/is-data-descriptor/index.js @@ -0,0 +1,49 @@ +/*! + * is-data-descriptor + * + * Copyright (c) 2015-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +var typeOf = require('kind-of'); + +module.exports = function isDataDescriptor(obj, prop) { + // data descriptor properties + var data = { + configurable: 'boolean', + enumerable: 'boolean', + writable: 'boolean' + }; + + if (typeOf(obj) !== 'object') { + return false; + } + + if (typeof prop === 'string') { + var val = Object.getOwnPropertyDescriptor(obj, prop); + return typeof val !== 'undefined'; + } + + if (!('value' in obj) && !('writable' in obj)) { + return false; + } + + for (var key in obj) { + if (key === 'value') continue; + + if (!data.hasOwnProperty(key)) { + continue; + } + + if (typeOf(obj[key]) === data[key]) { + continue; + } + + if (typeof obj[key] !== 'undefined') { + return false; + } + } + return true; +}; diff --git a/node_modules/is-data-descriptor/node_modules/kind-of/CHANGELOG.md b/node_modules/is-data-descriptor/node_modules/kind-of/CHANGELOG.md new file mode 100644 index 00000000..fb30b06d --- /dev/null +++ b/node_modules/is-data-descriptor/node_modules/kind-of/CHANGELOG.md @@ -0,0 +1,157 @@ +# Release history + +All notable changes to this project will be documented in this file. 
+ +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+ +## Guiding Principles + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each version is displayed. +- Mention whether you follow Semantic Versioning. + +
+ +
+ +## Types of changes + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/))_: + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities. + +
+ +## [6.0.0] - 2017-10-13 + +- refactor code to be more performant +- refactor benchmarks + +## [5.1.0] - 2017-10-13 + +**Added** + +- Merge pull request #15 from aretecode/patch-1 +- adds support and tests for string & array iterators + +**Changed** + +- updates benchmarks + +## [5.0.2] - 2017-08-02 + +- Merge pull request #14 from struct78/master +- Added `undefined` check + +## [5.0.0] - 2017-06-21 + +- Merge pull request #12 from aretecode/iterator +- Set Iterator + Map Iterator +- streamline `isbuffer`, minor edits + +## [4.0.0] - 2017-05-19 + +- Merge pull request #8 from tunnckoCore/master +- update deps + +## [3.2.2] - 2017-05-16 + +- fix version + +## [3.2.1] - 2017-05-16 + +- add browserify + +## [3.2.0] - 2017-04-25 + +- Merge pull request #10 from ksheedlo/unrequire-buffer +- add `promise` support and tests +- Remove unnecessary `Buffer` check + +## [3.1.0] - 2016-12-07 + +- Merge pull request #7 from laggingreflex/err +- add support for `error` and tests +- run update + +## [3.0.4] - 2016-07-29 + +- move tests +- run update + +## [3.0.3] - 2016-05-03 + +- fix prepublish script +- remove unused dep + +## [3.0.0] - 2015-11-17 + +- add typed array support +- Merge pull request #5 from miguelmota/typed-arrays +- adds new tests + +## [2.0.1] - 2015-08-21 + +- use `is-buffer` module + +## [2.0.0] - 2015-05-31 + +- Create fallback for `Array.isArray` if used as a browser package +- Merge pull request #2 from dtothefp/patch-1 +- Merge pull request #3 from pdehaan/patch-1 +- Merge branch 'master' of https://github.com/chorks/kind-of into chorks-master +- optimizations, mostly date and regex + +## [1.1.0] - 2015-02-09 + +- adds `buffer` support +- adds tests for `buffer` + +## [1.0.0] - 2015-01-19 + +- update benchmarks +- optimizations based on benchmarks + +## [0.1.2] - 2014-10-26 + +- return `typeof` value if it's not an object. 
very slight speed improvement +- use `.slice` +- adds benchmarks + +## [0.1.0] - 2014-9-26 + +- first commit + +[6.0.0]: https://github.com/jonschlinkert/kind-of/compare/5.1.0...6.0.0 +[5.1.0]: https://github.com/jonschlinkert/kind-of/compare/5.0.2...5.1.0 +[5.0.2]: https://github.com/jonschlinkert/kind-of/compare/5.0.1...5.0.2 +[5.0.1]: https://github.com/jonschlinkert/kind-of/compare/5.0.0...5.0.1 +[5.0.0]: https://github.com/jonschlinkert/kind-of/compare/4.0.0...5.0.0 +[4.0.0]: https://github.com/jonschlinkert/kind-of/compare/3.2.2...4.0.0 +[3.2.2]: https://github.com/jonschlinkert/kind-of/compare/3.2.1...3.2.2 +[3.2.1]: https://github.com/jonschlinkert/kind-of/compare/3.2.0...3.2.1 +[3.2.0]: https://github.com/jonschlinkert/kind-of/compare/3.1.0...3.2.0 +[3.1.0]: https://github.com/jonschlinkert/kind-of/compare/3.0.4...3.1.0 +[3.0.4]: https://github.com/jonschlinkert/kind-of/compare/3.0.3...3.0.4 +[3.0.3]: https://github.com/jonschlinkert/kind-of/compare/3.0.0...3.0.3 +[3.0.0]: https://github.com/jonschlinkert/kind-of/compare/2.0.1...3.0.0 +[2.0.1]: https://github.com/jonschlinkert/kind-of/compare/2.0.0...2.0.1 +[2.0.0]: https://github.com/jonschlinkert/kind-of/compare/1.1.0...2.0.0 +[1.1.0]: https://github.com/jonschlinkert/kind-of/compare/1.0.0...1.1.0 +[1.0.0]: https://github.com/jonschlinkert/kind-of/compare/0.1.2...1.0.0 +[0.1.2]: https://github.com/jonschlinkert/kind-of/compare/0.1.0...0.1.2 +[0.1.0]: https://github.com/jonschlinkert/kind-of/commit/2fae09b0b19b1aadb558e9be39f0c3ef6034eb87 + +[Unreleased]: https://github.com/jonschlinkert/kind-of/compare/0.1.2...HEAD +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog + diff --git a/node_modules/is-data-descriptor/node_modules/kind-of/LICENSE b/node_modules/is-data-descriptor/node_modules/kind-of/LICENSE new file mode 100644 index 00000000..3f2eca18 --- /dev/null +++ b/node_modules/is-data-descriptor/node_modules/kind-of/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-data-descriptor/node_modules/kind-of/README.md b/node_modules/is-data-descriptor/node_modules/kind-of/README.md new file mode 100644 index 00000000..4b0d4a81 --- /dev/null +++ b/node_modules/is-data-descriptor/node_modules/kind-of/README.md @@ -0,0 +1,365 @@ +# kind-of [![NPM version](https://img.shields.io/npm/v/kind-of.svg?style=flat)](https://www.npmjs.com/package/kind-of) [![NPM monthly downloads](https://img.shields.io/npm/dm/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![NPM total downloads](https://img.shields.io/npm/dt/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/kind-of.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/kind-of) + +> Get the native type of a value. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save kind-of +``` + +Install with [bower](https://bower.io/) + +```sh +$ bower install kind-of --save +``` + +## Why use this? + +1. [it's fast](#benchmarks) | [optimizations](#optimizations) +2. [better type checking](#better-type-checking) + +## Usage + +> es5, es6, and browser ready + +```js +var kindOf = require('kind-of'); + +kindOf(undefined); +//=> 'undefined' + +kindOf(null); +//=> 'null' + +kindOf(true); +//=> 'boolean' + +kindOf(false); +//=> 'boolean' + +kindOf(new Buffer('')); +//=> 'buffer' + +kindOf(42); +//=> 'number' + +kindOf('str'); +//=> 'string' + +kindOf(arguments); +//=> 'arguments' + +kindOf({}); +//=> 'object' + +kindOf(Object.create(null)); +//=> 'object' + +kindOf(new Test()); +//=> 'object' + +kindOf(new Date()); +//=> 'date' + +kindOf([1, 2, 3]); +//=> 'array' + +kindOf(/foo/); +//=> 'regexp' + +kindOf(new RegExp('foo')); +//=> 'regexp' + +kindOf(new Error('error')); +//=> 'error' + +kindOf(function () {}); +//=> 'function' + +kindOf(function * () {}); +//=> 'generatorfunction' + +kindOf(Symbol('str')); +//=> 'symbol' + +kindOf(new Map()); +//=> 'map' + +kindOf(new WeakMap()); +//=> 'weakmap' + +kindOf(new Set()); +//=> 'set' + +kindOf(new WeakSet()); +//=> 'weakset' + +kindOf(new Int8Array()); +//=> 'int8array' + +kindOf(new Uint8Array()); +//=> 'uint8array' + +kindOf(new Uint8ClampedArray()); +//=> 'uint8clampedarray' + +kindOf(new Int16Array()); +//=> 'int16array' + +kindOf(new Uint16Array()); +//=> 'uint16array' + +kindOf(new Int32Array()); +//=> 'int32array' + +kindOf(new Uint32Array()); +//=> 'uint32array' + +kindOf(new Float32Array()); +//=> 'float32array' + +kindOf(new Float64Array()); +//=> 'float64array' +``` + +## Benchmarks + +Benchmarked against [typeof](http://github.com/CodingFu/typeof) and [type-of](https://github.com/ForbesLindesay/type-of). 
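+The raw numbers below come from the project's own benchmark setup. For a rough local spot-check (this is not the project's harness; the `time` helper, test values, and iteration count here are made up, and absolute timings will vary by machine and Node version), something like this sketch works:
+
+```js
+var kindOf = require('kind-of');
+
+// crude timing loop; the real benchmarks use a proper harness with warm-up and sampling
+function time(label, fn, value, n) {
+  var start = Date.now();
+  for (var i = 0; i < n; i++) fn(value);
+  console.log(label + ': ' + (Date.now() - start) + 'ms for ' + n + ' calls');
+}
+
+time('kind-of on an array', kindOf, [1, 2, 3], 1e6);
+time('kind-of on a date', kindOf, new Date(), 1e6);
+// to compare against the `typeof` / `type-of` packages, install them and time them the same way
+```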
+ +```bash +# arguments (32 bytes) + kind-of x 17,024,098 ops/sec ±1.90% (86 runs sampled) + lib-type-of x 11,926,235 ops/sec ±1.34% (83 runs sampled) + lib-typeof x 9,245,257 ops/sec ±1.22% (87 runs sampled) + + fastest is kind-of (by 161% avg) + +# array (22 bytes) + kind-of x 17,196,492 ops/sec ±1.07% (88 runs sampled) + lib-type-of x 8,838,283 ops/sec ±1.02% (87 runs sampled) + lib-typeof x 8,677,848 ops/sec ±0.87% (87 runs sampled) + + fastest is kind-of (by 196% avg) + +# boolean (24 bytes) + kind-of x 16,841,600 ops/sec ±1.10% (86 runs sampled) + lib-type-of x 8,096,787 ops/sec ±0.95% (87 runs sampled) + lib-typeof x 8,423,345 ops/sec ±1.15% (86 runs sampled) + + fastest is kind-of (by 204% avg) + +# buffer (38 bytes) + kind-of x 14,848,060 ops/sec ±1.05% (86 runs sampled) + lib-type-of x 3,671,577 ops/sec ±1.49% (87 runs sampled) + lib-typeof x 8,360,236 ops/sec ±1.24% (86 runs sampled) + + fastest is kind-of (by 247% avg) + +# date (30 bytes) + kind-of x 16,067,761 ops/sec ±1.58% (86 runs sampled) + lib-type-of x 8,954,436 ops/sec ±1.40% (87 runs sampled) + lib-typeof x 8,488,307 ops/sec ±1.51% (84 runs sampled) + + fastest is kind-of (by 184% avg) + +# error (36 bytes) + kind-of x 9,634,090 ops/sec ±1.12% (89 runs sampled) + lib-type-of x 7,735,624 ops/sec ±1.32% (86 runs sampled) + lib-typeof x 7,442,160 ops/sec ±1.11% (90 runs sampled) + + fastest is kind-of (by 127% avg) + +# function (34 bytes) + kind-of x 10,031,494 ops/sec ±1.27% (86 runs sampled) + lib-type-of x 9,502,757 ops/sec ±1.17% (89 runs sampled) + lib-typeof x 8,278,985 ops/sec ±1.08% (88 runs sampled) + + fastest is kind-of (by 113% avg) + +# null (24 bytes) + kind-of x 18,159,808 ops/sec ±1.92% (86 runs sampled) + lib-type-of x 12,927,635 ops/sec ±1.01% (88 runs sampled) + lib-typeof x 7,958,234 ops/sec ±1.21% (89 runs sampled) + + fastest is kind-of (by 174% avg) + +# number (22 bytes) + kind-of x 17,846,779 ops/sec ±0.91% (85 runs sampled) + lib-type-of x 3,316,636 ops/sec ±1.19% (86 runs sampled) + lib-typeof x 2,329,477 ops/sec ±2.21% (85 runs sampled) + + fastest is kind-of (by 632% avg) + +# object-plain (47 bytes) + kind-of x 7,085,155 ops/sec ±1.05% (88 runs sampled) + lib-type-of x 8,870,930 ops/sec ±1.06% (83 runs sampled) + lib-typeof x 8,716,024 ops/sec ±1.05% (87 runs sampled) + + fastest is lib-type-of (by 112% avg) + +# regex (25 bytes) + kind-of x 14,196,052 ops/sec ±1.65% (84 runs sampled) + lib-type-of x 9,554,164 ops/sec ±1.25% (88 runs sampled) + lib-typeof x 8,359,691 ops/sec ±1.07% (87 runs sampled) + + fastest is kind-of (by 158% avg) + +# string (33 bytes) + kind-of x 16,131,428 ops/sec ±1.41% (85 runs sampled) + lib-type-of x 7,273,172 ops/sec ±1.05% (87 runs sampled) + lib-typeof x 7,382,635 ops/sec ±1.17% (85 runs sampled) + + fastest is kind-of (by 220% avg) + +# symbol (34 bytes) + kind-of x 17,011,537 ops/sec ±1.24% (86 runs sampled) + lib-type-of x 3,492,454 ops/sec ±1.23% (89 runs sampled) + lib-typeof x 7,471,235 ops/sec ±2.48% (87 runs sampled) + + fastest is kind-of (by 310% avg) + +# template-strings (36 bytes) + kind-of x 15,434,250 ops/sec ±1.46% (83 runs sampled) + lib-type-of x 7,157,907 ops/sec ±0.97% (87 runs sampled) + lib-typeof x 7,517,986 ops/sec ±0.92% (86 runs sampled) + + fastest is kind-of (by 210% avg) + +# undefined (29 bytes) + kind-of x 19,167,115 ops/sec ±1.71% (87 runs sampled) + lib-type-of x 15,477,740 ops/sec ±1.63% (85 runs sampled) + lib-typeof x 19,075,495 ops/sec ±1.17% (83 runs sampled) + + fastest is lib-typeof,kind-of + +``` + +## Optimizations + 
+In 7 out of 8 cases, this library is 2x-10x faster than other top libraries included in the benchmarks. There are a few things that lead to this performance advantage, none of them hard and fast rules, but all of them simple and repeatable in almost any code library: + +1. Optimize around the fastest and most common use cases first. Of course, this will change from project-to-project, but I took some time to understand how and why `typeof` checks were being used in my own libraries and other libraries I use a lot. +2. Optimize around bottlenecks - In other words, the order in which conditionals are implemented is significant, because each check is only as fast as the failing checks that came before it. Here, the biggest bottleneck by far is checking for plain objects (an object that was created by the `Object` constructor). I opted to make this check happen by process of elimination rather than brute force up front (e.g. by using something like `val.constructor.name`), so that every other type check would not be penalized by it. +3. Don't do unnecessary processing - why run `.slice(8, -1).toLowerCase();` just to get the word `regexp`? It's much faster to do `if (type === '[object RegExp]') return 'regexp'`. +4. There is no reason to make the code in a microlib as terse as possible, just to win points for making it shorter. It's always better to favor performant code over terse code. You will always only be using a single `require()` statement to use the library anyway, regardless of how the code is written. + +## Better type checking + +kind-of seems to be more consistently "correct" than other type checking libs I've looked at. For example, here are some differing results from other popular libs: + +### [typeof](https://github.com/CodingFu/typeof) lib + +Incorrectly identifies instances of custom constructors (pretty common): + +```js +var typeOf = require('typeof'); +function Test() {} +console.log(typeOf(new Test())); +//=> 'test' +``` + +Returns `object` instead of `arguments`: + +```js +function foo() { + console.log(typeOf(arguments)) //=> 'object' +} +foo(); +``` + +### [type-of](https://github.com/ForbesLindesay/type-of) lib + +Incorrectly returns `object` for generator functions, buffers, `Map`, `Set`, `WeakMap` and `WeakSet`: + +```js +function * foo() {} +console.log(typeOf(foo)); +//=> 'object' +console.log(typeOf(new Buffer(''))); +//=> 'object' +console.log(typeOf(new Map())); +//=> 'object' +console.log(typeOf(new Set())); +//=> 'object' +console.log(typeOf(new WeakMap())); +//=> 'object' +console.log(typeOf(new WeakSet())); +//=> 'object' +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [is-number](https://www.npmjs.com/package/is-number): Returns true if the value is a number. comprehensive tests. | [homepage](https://github.com/jonschlinkert/is-number "Returns true if the value is a number. comprehensive tests.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 98 | [jonschlinkert](https://github.com/jonschlinkert) | +| 3 | [aretecode](https://github.com/aretecode) | +| 2 | [miguelmota](https://github.com/miguelmota) | +| 1 | [dtothefp](https://github.com/dtothefp) | +| 1 | [ianstormtaylor](https://github.com/ianstormtaylor) | +| 1 | [ksheedlo](https://github.com/ksheedlo) | +| 1 | [pdehaan](https://github.com/pdehaan) | +| 1 | [laggingreflex](https://github.com/laggingreflex) | +| 1 | [charlike-old](https://github.com/charlike-old) | + +### Author + +**Jon Schlinkert** + +* [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on December 01, 2017._ \ No newline at end of file diff --git a/node_modules/is-data-descriptor/node_modules/kind-of/index.js b/node_modules/is-data-descriptor/node_modules/kind-of/index.js new file mode 100644 index 00000000..aa2bb394 --- /dev/null +++ b/node_modules/is-data-descriptor/node_modules/kind-of/index.js @@ -0,0 +1,129 @@ +var toString = Object.prototype.toString; + +module.exports = function kindOf(val) { + if (val === void 0) return 'undefined'; + if (val === null) return 'null'; + + var type = typeof val; + if (type === 'boolean') return 'boolean'; + if (type === 'string') return 'string'; + if (type === 'number') return 'number'; + if (type === 'symbol') return 'symbol'; + if (type === 'function') { + return isGeneratorFn(val) ? 
'generatorfunction' : 'function'; + } + + if (isArray(val)) return 'array'; + if (isBuffer(val)) return 'buffer'; + if (isArguments(val)) return 'arguments'; + if (isDate(val)) return 'date'; + if (isError(val)) return 'error'; + if (isRegexp(val)) return 'regexp'; + + switch (ctorName(val)) { + case 'Symbol': return 'symbol'; + case 'Promise': return 'promise'; + + // Set, Map, WeakSet, WeakMap + case 'WeakMap': return 'weakmap'; + case 'WeakSet': return 'weakset'; + case 'Map': return 'map'; + case 'Set': return 'set'; + + // 8-bit typed arrays + case 'Int8Array': return 'int8array'; + case 'Uint8Array': return 'uint8array'; + case 'Uint8ClampedArray': return 'uint8clampedarray'; + + // 16-bit typed arrays + case 'Int16Array': return 'int16array'; + case 'Uint16Array': return 'uint16array'; + + // 32-bit typed arrays + case 'Int32Array': return 'int32array'; + case 'Uint32Array': return 'uint32array'; + case 'Float32Array': return 'float32array'; + case 'Float64Array': return 'float64array'; + } + + if (isGeneratorObj(val)) { + return 'generator'; + } + + // Non-plain objects + type = toString.call(val); + switch (type) { + case '[object Object]': return 'object'; + // iterators + case '[object Map Iterator]': return 'mapiterator'; + case '[object Set Iterator]': return 'setiterator'; + case '[object String Iterator]': return 'stringiterator'; + case '[object Array Iterator]': return 'arrayiterator'; + } + + // other + return type.slice(8, -1).toLowerCase().replace(/\s/g, ''); +}; + +function ctorName(val) { + return val.constructor ? val.constructor.name : null; +} + +function isArray(val) { + if (Array.isArray) return Array.isArray(val); + return val instanceof Array; +} + +function isError(val) { + return val instanceof Error || (typeof val.message === 'string' && val.constructor && typeof val.constructor.stackTraceLimit === 'number'); +} + +function isDate(val) { + if (val instanceof Date) return true; + return typeof val.toDateString === 'function' + && typeof val.getDate === 'function' + && typeof val.setDate === 'function'; +} + +function isRegexp(val) { + if (val instanceof RegExp) return true; + return typeof val.flags === 'string' + && typeof val.ignoreCase === 'boolean' + && typeof val.multiline === 'boolean' + && typeof val.global === 'boolean'; +} + +function isGeneratorFn(name, val) { + return ctorName(name) === 'GeneratorFunction'; +} + +function isGeneratorObj(val) { + return typeof val.throw === 'function' + && typeof val.return === 'function' + && typeof val.next === 'function'; +} + +function isArguments(val) { + try { + if (typeof val.length === 'number' && typeof val.callee === 'function') { + return true; + } + } catch (err) { + if (err.message.indexOf('callee') !== -1) { + return true; + } + } + return false; +} + +/** + * If you need to support Safari 5-7 (8-10 yr-old browser), + * take a look at https://github.com/feross/is-buffer + */ + +function isBuffer(val) { + if (val.constructor && typeof val.constructor.isBuffer === 'function') { + return val.constructor.isBuffer(val); + } + return false; +} diff --git a/node_modules/is-data-descriptor/node_modules/kind-of/package.json b/node_modules/is-data-descriptor/node_modules/kind-of/package.json new file mode 100644 index 00000000..73d70aee --- /dev/null +++ b/node_modules/is-data-descriptor/node_modules/kind-of/package.json @@ -0,0 +1,88 @@ +{ + "name": "kind-of", + "description": "Get the native type of a value.", + "version": "6.0.2", + "homepage": "https://github.com/jonschlinkert/kind-of", + "author": "Jon 
Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "David Fox-Powell (https://dtothefp.github.io/me)", + "James (https://twitter.com/aretecode)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Ken Sheedlo (kensheedlo.com)", + "laggingreflex (https://github.com/laggingreflex)", + "Miguel Mota (https://miguelmota.com)", + "Peter deHaan (http://about.me/peterdehaan)", + "tunnckoCore (https://i.am.charlike.online)" + ], + "repository": "jonschlinkert/kind-of", + "bugs": { + "url": "https://github.com/jonschlinkert/kind-of/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha", + "prepublish": "browserify -o browser.js -e index.js -s index --bare" + }, + "devDependencies": { + "benchmarked": "^2.0.0", + "browserify": "^14.4.0", + "gulp-format-md": "^1.0.0", + "mocha": "^4.0.1", + "write": "^1.0.3" + }, + "keywords": [ + "arguments", + "array", + "boolean", + "check", + "date", + "function", + "is", + "is-type", + "is-type-of", + "kind", + "kind-of", + "number", + "object", + "of", + "regexp", + "string", + "test", + "type", + "type-of", + "typeof", + "types" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "is-glob", + "is-number", + "is-primitive" + ] + }, + "reflinks": [ + "type-of", + "typeof", + "verb" + ] + } +} diff --git a/node_modules/is-data-descriptor/package.json b/node_modules/is-data-descriptor/package.json new file mode 100644 index 00000000..0b093838 --- /dev/null +++ b/node_modules/is-data-descriptor/package.json @@ -0,0 +1,72 @@ +{ + "name": "is-data-descriptor", + "description": "Returns true if a value has the characteristics of a valid JavaScript data descriptor.", + "version": "1.0.0", + "homepage": "https://github.com/jonschlinkert/is-data-descriptor", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Rouven Weßling (www.rouvenwessling.de)" + ], + "repository": "jonschlinkert/is-data-descriptor", + "bugs": { + "url": "https://github.com/jonschlinkert/is-data-descriptor/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "kind-of": "^6.0.0" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "accessor", + "check", + "data", + "descriptor", + "get", + "getter", + "is", + "keys", + "object", + "properties", + "property", + "set", + "setter", + "type", + "valid", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "is-accessor-descriptor", + "is-data-descriptor", + "is-descriptor", + "isobject" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/is-descriptor/LICENSE b/node_modules/is-descriptor/LICENSE new file mode 100644 index 00000000..c0d7f136 --- /dev/null +++ b/node_modules/is-descriptor/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/is-descriptor/README.md b/node_modules/is-descriptor/README.md new file mode 100644 index 00000000..658e5330 --- /dev/null +++ b/node_modules/is-descriptor/README.md @@ -0,0 +1,193 @@ +# is-descriptor [![NPM version](https://img.shields.io/npm/v/is-descriptor.svg?style=flat)](https://www.npmjs.com/package/is-descriptor) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-descriptor.svg?style=flat)](https://npmjs.org/package/is-descriptor) [![NPM total downloads](https://img.shields.io/npm/dt/is-descriptor.svg?style=flat)](https://npmjs.org/package/is-descriptor) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-descriptor.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-descriptor) + +> Returns true if a value has the characteristics of a valid JavaScript descriptor. Works for data descriptors and accessor descriptors. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-descriptor +``` + +## Usage + +```js +var isDescriptor = require('is-descriptor'); + +isDescriptor({value: 'foo'}) +//=> true +isDescriptor({get: function(){}, set: function(){}}) +//=> true +isDescriptor({get: 'foo', set: function(){}}) +//=> false +``` + +You may also check for a descriptor by passing an object as the first argument and property name (`string`) as the second argument. + +```js +var obj = {}; +obj.foo = 'abc'; + +Object.defineProperty(obj, 'bar', { + value: 'xyz' +}); + +isDescriptor(obj, 'foo'); +//=> true +isDescriptor(obj, 'bar'); +//=> true +``` + +## Examples + +### value type + +`false` when not an object + +```js +isDescriptor('a'); +//=> false +isDescriptor(null); +//=> false +isDescriptor([]); +//=> false +``` + +### data descriptor + +`true` when the object has valid properties with valid values. 
+ +```js +isDescriptor({value: 'foo'}); +//=> true +isDescriptor({value: noop}); +//=> true +``` + +`false` when the object has invalid properties + +```js +isDescriptor({value: 'foo', bar: 'baz'}); +//=> false +isDescriptor({value: 'foo', bar: 'baz'}); +//=> false +isDescriptor({value: 'foo', get: noop}); +//=> false +isDescriptor({get: noop, value: noop}); +//=> false +``` + +`false` when a value is not the correct type + +```js +isDescriptor({value: 'foo', enumerable: 'foo'}); +//=> false +isDescriptor({value: 'foo', configurable: 'foo'}); +//=> false +isDescriptor({value: 'foo', writable: 'foo'}); +//=> false +``` + +### accessor descriptor + +`true` when the object has valid properties with valid values. + +```js +isDescriptor({get: noop, set: noop}); +//=> true +isDescriptor({get: noop}); +//=> true +isDescriptor({set: noop}); +//=> true +``` + +`false` when the object has invalid properties + +```js +isDescriptor({get: noop, set: noop, bar: 'baz'}); +//=> false +isDescriptor({get: noop, writable: true}); +//=> false +isDescriptor({get: noop, value: true}); +//=> false +``` + +`false` when an accessor is not a function + +```js +isDescriptor({get: noop, set: 'baz'}); +//=> false +isDescriptor({get: 'foo', set: noop}); +//=> false +isDescriptor({get: 'foo', bar: 'baz'}); +//=> false +isDescriptor({get: 'foo', set: 'baz'}); +//=> false +``` + +`false` when a value is not the correct type + +```js +isDescriptor({get: noop, set: noop, enumerable: 'foo'}); +//=> false +isDescriptor({set: noop, configurable: 'foo'}); +//=> false +isDescriptor({get: noop, configurable: 'foo'}); +//=> false +``` + +## About + +### Related projects + +* [is-accessor-descriptor](https://www.npmjs.com/package/is-accessor-descriptor): Returns true if a value has the characteristics of a valid JavaScript accessor descriptor. | [homepage](https://github.com/jonschlinkert/is-accessor-descriptor "Returns true if a value has the characteristics of a valid JavaScript accessor descriptor.") +* [is-data-descriptor](https://www.npmjs.com/package/is-data-descriptor): Returns true if a value has the characteristics of a valid JavaScript data descriptor. | [homepage](https://github.com/jonschlinkert/is-data-descriptor "Returns true if a value has the characteristics of a valid JavaScript data descriptor.") +* [is-descriptor](https://www.npmjs.com/package/is-descriptor): Returns true if a value has the characteristics of a valid JavaScript descriptor. Works for… [more](https://github.com/jonschlinkert/is-descriptor) | [homepage](https://github.com/jonschlinkert/is-descriptor "Returns true if a value has the characteristics of a valid JavaScript descriptor. Works for data descriptors and accessor descriptors.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 24 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [doowb](https://github.com/doowb) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. 
Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on July 22, 2017._ \ No newline at end of file diff --git a/node_modules/is-descriptor/index.js b/node_modules/is-descriptor/index.js new file mode 100644 index 00000000..c9b91d76 --- /dev/null +++ b/node_modules/is-descriptor/index.js @@ -0,0 +1,22 @@ +/*! + * is-descriptor + * + * Copyright (c) 2015-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +var typeOf = require('kind-of'); +var isAccessor = require('is-accessor-descriptor'); +var isData = require('is-data-descriptor'); + +module.exports = function isDescriptor(obj, key) { + if (typeOf(obj) !== 'object') { + return false; + } + if ('get' in obj) { + return isAccessor(obj, key); + } + return isData(obj, key); +}; diff --git a/node_modules/is-descriptor/node_modules/kind-of/CHANGELOG.md b/node_modules/is-descriptor/node_modules/kind-of/CHANGELOG.md new file mode 100644 index 00000000..fb30b06d --- /dev/null +++ b/node_modules/is-descriptor/node_modules/kind-of/CHANGELOG.md @@ -0,0 +1,157 @@ +# Release history + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+ +## Guiding Principles + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each version is displayed. +- Mention whether you follow Semantic Versioning. + +
+ +
+ +## Types of changes + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/))_: + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities. + +
+ +## [6.0.0] - 2017-10-13 + +- refactor code to be more performant +- refactor benchmarks + +## [5.1.0] - 2017-10-13 + +**Added** + +- Merge pull request #15 from aretecode/patch-1 +- adds support and tests for string & array iterators + +**Changed** + +- updates benchmarks + +## [5.0.2] - 2017-08-02 + +- Merge pull request #14 from struct78/master +- Added `undefined` check + +## [5.0.0] - 2017-06-21 + +- Merge pull request #12 from aretecode/iterator +- Set Iterator + Map Iterator +- streamline `isbuffer`, minor edits + +## [4.0.0] - 2017-05-19 + +- Merge pull request #8 from tunnckoCore/master +- update deps + +## [3.2.2] - 2017-05-16 + +- fix version + +## [3.2.1] - 2017-05-16 + +- add browserify + +## [3.2.0] - 2017-04-25 + +- Merge pull request #10 from ksheedlo/unrequire-buffer +- add `promise` support and tests +- Remove unnecessary `Buffer` check + +## [3.1.0] - 2016-12-07 + +- Merge pull request #7 from laggingreflex/err +- add support for `error` and tests +- run update + +## [3.0.4] - 2016-07-29 + +- move tests +- run update + +## [3.0.3] - 2016-05-03 + +- fix prepublish script +- remove unused dep + +## [3.0.0] - 2015-11-17 + +- add typed array support +- Merge pull request #5 from miguelmota/typed-arrays +- adds new tests + +## [2.0.1] - 2015-08-21 + +- use `is-buffer` module + +## [2.0.0] - 2015-05-31 + +- Create fallback for `Array.isArray` if used as a browser package +- Merge pull request #2 from dtothefp/patch-1 +- Merge pull request #3 from pdehaan/patch-1 +- Merge branch 'master' of https://github.com/chorks/kind-of into chorks-master +- optimizations, mostly date and regex + +## [1.1.0] - 2015-02-09 + +- adds `buffer` support +- adds tests for `buffer` + +## [1.0.0] - 2015-01-19 + +- update benchmarks +- optimizations based on benchmarks + +## [0.1.2] - 2014-10-26 + +- return `typeof` value if it's not an object. 
very slight speed improvement +- use `.slice` +- adds benchmarks + +## [0.1.0] - 2014-9-26 + +- first commit + +[6.0.0]: https://github.com/jonschlinkert/kind-of/compare/5.1.0...6.0.0 +[5.1.0]: https://github.com/jonschlinkert/kind-of/compare/5.0.2...5.1.0 +[5.0.2]: https://github.com/jonschlinkert/kind-of/compare/5.0.1...5.0.2 +[5.0.1]: https://github.com/jonschlinkert/kind-of/compare/5.0.0...5.0.1 +[5.0.0]: https://github.com/jonschlinkert/kind-of/compare/4.0.0...5.0.0 +[4.0.0]: https://github.com/jonschlinkert/kind-of/compare/3.2.2...4.0.0 +[3.2.2]: https://github.com/jonschlinkert/kind-of/compare/3.2.1...3.2.2 +[3.2.1]: https://github.com/jonschlinkert/kind-of/compare/3.2.0...3.2.1 +[3.2.0]: https://github.com/jonschlinkert/kind-of/compare/3.1.0...3.2.0 +[3.1.0]: https://github.com/jonschlinkert/kind-of/compare/3.0.4...3.1.0 +[3.0.4]: https://github.com/jonschlinkert/kind-of/compare/3.0.3...3.0.4 +[3.0.3]: https://github.com/jonschlinkert/kind-of/compare/3.0.0...3.0.3 +[3.0.0]: https://github.com/jonschlinkert/kind-of/compare/2.0.1...3.0.0 +[2.0.1]: https://github.com/jonschlinkert/kind-of/compare/2.0.0...2.0.1 +[2.0.0]: https://github.com/jonschlinkert/kind-of/compare/1.1.0...2.0.0 +[1.1.0]: https://github.com/jonschlinkert/kind-of/compare/1.0.0...1.1.0 +[1.0.0]: https://github.com/jonschlinkert/kind-of/compare/0.1.2...1.0.0 +[0.1.2]: https://github.com/jonschlinkert/kind-of/compare/0.1.0...0.1.2 +[0.1.0]: https://github.com/jonschlinkert/kind-of/commit/2fae09b0b19b1aadb558e9be39f0c3ef6034eb87 + +[Unreleased]: https://github.com/jonschlinkert/kind-of/compare/0.1.2...HEAD +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog + diff --git a/node_modules/is-descriptor/node_modules/kind-of/LICENSE b/node_modules/is-descriptor/node_modules/kind-of/LICENSE new file mode 100644 index 00000000..3f2eca18 --- /dev/null +++ b/node_modules/is-descriptor/node_modules/kind-of/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-descriptor/node_modules/kind-of/README.md b/node_modules/is-descriptor/node_modules/kind-of/README.md new file mode 100644 index 00000000..4b0d4a81 --- /dev/null +++ b/node_modules/is-descriptor/node_modules/kind-of/README.md @@ -0,0 +1,365 @@ +# kind-of [![NPM version](https://img.shields.io/npm/v/kind-of.svg?style=flat)](https://www.npmjs.com/package/kind-of) [![NPM monthly downloads](https://img.shields.io/npm/dm/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![NPM total downloads](https://img.shields.io/npm/dt/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/kind-of.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/kind-of) + +> Get the native type of a value. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save kind-of +``` + +Install with [bower](https://bower.io/) + +```sh +$ bower install kind-of --save +``` + +## Why use this? + +1. [it's fast](#benchmarks) | [optimizations](#optimizations) +2. [better type checking](#better-type-checking) + +## Usage + +> es5, es6, and browser ready + +```js +var kindOf = require('kind-of'); + +kindOf(undefined); +//=> 'undefined' + +kindOf(null); +//=> 'null' + +kindOf(true); +//=> 'boolean' + +kindOf(false); +//=> 'boolean' + +kindOf(new Buffer('')); +//=> 'buffer' + +kindOf(42); +//=> 'number' + +kindOf('str'); +//=> 'string' + +kindOf(arguments); +//=> 'arguments' + +kindOf({}); +//=> 'object' + +kindOf(Object.create(null)); +//=> 'object' + +kindOf(new Test()); +//=> 'object' + +kindOf(new Date()); +//=> 'date' + +kindOf([1, 2, 3]); +//=> 'array' + +kindOf(/foo/); +//=> 'regexp' + +kindOf(new RegExp('foo')); +//=> 'regexp' + +kindOf(new Error('error')); +//=> 'error' + +kindOf(function () {}); +//=> 'function' + +kindOf(function * () {}); +//=> 'generatorfunction' + +kindOf(Symbol('str')); +//=> 'symbol' + +kindOf(new Map()); +//=> 'map' + +kindOf(new WeakMap()); +//=> 'weakmap' + +kindOf(new Set()); +//=> 'set' + +kindOf(new WeakSet()); +//=> 'weakset' + +kindOf(new Int8Array()); +//=> 'int8array' + +kindOf(new Uint8Array()); +//=> 'uint8array' + +kindOf(new Uint8ClampedArray()); +//=> 'uint8clampedarray' + +kindOf(new Int16Array()); +//=> 'int16array' + +kindOf(new Uint16Array()); +//=> 'uint16array' + +kindOf(new Int32Array()); +//=> 'int32array' + +kindOf(new Uint32Array()); +//=> 'uint32array' + +kindOf(new Float32Array()); +//=> 'float32array' + +kindOf(new Float64Array()); +//=> 'float64array' +``` + +## Benchmarks + +Benchmarked against [typeof](http://github.com/CodingFu/typeof) and [type-of](https://github.com/ForbesLindesay/type-of). 
+ +```bash +# arguments (32 bytes) + kind-of x 17,024,098 ops/sec ±1.90% (86 runs sampled) + lib-type-of x 11,926,235 ops/sec ±1.34% (83 runs sampled) + lib-typeof x 9,245,257 ops/sec ±1.22% (87 runs sampled) + + fastest is kind-of (by 161% avg) + +# array (22 bytes) + kind-of x 17,196,492 ops/sec ±1.07% (88 runs sampled) + lib-type-of x 8,838,283 ops/sec ±1.02% (87 runs sampled) + lib-typeof x 8,677,848 ops/sec ±0.87% (87 runs sampled) + + fastest is kind-of (by 196% avg) + +# boolean (24 bytes) + kind-of x 16,841,600 ops/sec ±1.10% (86 runs sampled) + lib-type-of x 8,096,787 ops/sec ±0.95% (87 runs sampled) + lib-typeof x 8,423,345 ops/sec ±1.15% (86 runs sampled) + + fastest is kind-of (by 204% avg) + +# buffer (38 bytes) + kind-of x 14,848,060 ops/sec ±1.05% (86 runs sampled) + lib-type-of x 3,671,577 ops/sec ±1.49% (87 runs sampled) + lib-typeof x 8,360,236 ops/sec ±1.24% (86 runs sampled) + + fastest is kind-of (by 247% avg) + +# date (30 bytes) + kind-of x 16,067,761 ops/sec ±1.58% (86 runs sampled) + lib-type-of x 8,954,436 ops/sec ±1.40% (87 runs sampled) + lib-typeof x 8,488,307 ops/sec ±1.51% (84 runs sampled) + + fastest is kind-of (by 184% avg) + +# error (36 bytes) + kind-of x 9,634,090 ops/sec ±1.12% (89 runs sampled) + lib-type-of x 7,735,624 ops/sec ±1.32% (86 runs sampled) + lib-typeof x 7,442,160 ops/sec ±1.11% (90 runs sampled) + + fastest is kind-of (by 127% avg) + +# function (34 bytes) + kind-of x 10,031,494 ops/sec ±1.27% (86 runs sampled) + lib-type-of x 9,502,757 ops/sec ±1.17% (89 runs sampled) + lib-typeof x 8,278,985 ops/sec ±1.08% (88 runs sampled) + + fastest is kind-of (by 113% avg) + +# null (24 bytes) + kind-of x 18,159,808 ops/sec ±1.92% (86 runs sampled) + lib-type-of x 12,927,635 ops/sec ±1.01% (88 runs sampled) + lib-typeof x 7,958,234 ops/sec ±1.21% (89 runs sampled) + + fastest is kind-of (by 174% avg) + +# number (22 bytes) + kind-of x 17,846,779 ops/sec ±0.91% (85 runs sampled) + lib-type-of x 3,316,636 ops/sec ±1.19% (86 runs sampled) + lib-typeof x 2,329,477 ops/sec ±2.21% (85 runs sampled) + + fastest is kind-of (by 632% avg) + +# object-plain (47 bytes) + kind-of x 7,085,155 ops/sec ±1.05% (88 runs sampled) + lib-type-of x 8,870,930 ops/sec ±1.06% (83 runs sampled) + lib-typeof x 8,716,024 ops/sec ±1.05% (87 runs sampled) + + fastest is lib-type-of (by 112% avg) + +# regex (25 bytes) + kind-of x 14,196,052 ops/sec ±1.65% (84 runs sampled) + lib-type-of x 9,554,164 ops/sec ±1.25% (88 runs sampled) + lib-typeof x 8,359,691 ops/sec ±1.07% (87 runs sampled) + + fastest is kind-of (by 158% avg) + +# string (33 bytes) + kind-of x 16,131,428 ops/sec ±1.41% (85 runs sampled) + lib-type-of x 7,273,172 ops/sec ±1.05% (87 runs sampled) + lib-typeof x 7,382,635 ops/sec ±1.17% (85 runs sampled) + + fastest is kind-of (by 220% avg) + +# symbol (34 bytes) + kind-of x 17,011,537 ops/sec ±1.24% (86 runs sampled) + lib-type-of x 3,492,454 ops/sec ±1.23% (89 runs sampled) + lib-typeof x 7,471,235 ops/sec ±2.48% (87 runs sampled) + + fastest is kind-of (by 310% avg) + +# template-strings (36 bytes) + kind-of x 15,434,250 ops/sec ±1.46% (83 runs sampled) + lib-type-of x 7,157,907 ops/sec ±0.97% (87 runs sampled) + lib-typeof x 7,517,986 ops/sec ±0.92% (86 runs sampled) + + fastest is kind-of (by 210% avg) + +# undefined (29 bytes) + kind-of x 19,167,115 ops/sec ±1.71% (87 runs sampled) + lib-type-of x 15,477,740 ops/sec ±1.63% (85 runs sampled) + lib-typeof x 19,075,495 ops/sec ±1.17% (83 runs sampled) + + fastest is lib-typeof,kind-of + +``` + +## Optimizations + 
+In 7 out of 8 cases, this library is 2x-10x faster than other top libraries included in the benchmarks. There are a few things that lead to this performance advantage, none of them hard and fast rules, but all of them simple and repeatable in almost any code library:
+
+1. Optimize around the fastest and most common use cases first. Of course, this will change from project-to-project, but I took some time to understand how and why `typeof` checks were being used in my own libraries and other libraries I use a lot.
+2. Optimize around bottlenecks - In other words, the order in which conditionals are implemented is significant, because each check is only as fast as the failing checks that came before it. Here, the biggest bottleneck by far is checking for plain objects (an object that was created by the `Object` constructor). I opted to make this check happen by process of elimination rather than brute force up front (e.g. by using something like `val.constructor.name`), so that every other type check would not be penalized by it.
+3. Don't do unnecessary processing - why do `.slice(8, -1).toLowerCase();` just to get the word `regexp`? It's much faster to do `if (type === '[object RegExp]') return 'regexp'`
+4. There is no reason to make the code in a microlib as terse as possible, just to win points for making it shorter. It's always better to favor performant code over terse code. You will always only be using a single `require()` statement to use the library anyway, regardless of how the code is written.
+
+## Better type checking
+
+kind-of seems to be more consistently "correct" than other type checking libs I've looked at. For example, here are some differing results from other popular libs:
+
+### [typeof](https://github.com/CodingFu/typeof) lib
+
+Incorrectly identifies instances of custom constructors (pretty common):
+
+```js
+var typeOf = require('typeof');
+function Test() {}
+console.log(typeOf(new Test()));
+//=> 'test'
+```
+
+Returns `object` instead of `arguments`:
+
+```js
+function foo() {
+  console.log(typeOf(arguments)) //=> 'object'
+}
+foo();
+```
+
+### [type-of](https://github.com/ForbesLindesay/type-of) lib
+
+Incorrectly returns `object` for generator functions, buffers, `Map`, `Set`, `WeakMap` and `WeakSet`:
+
+```js
+function * foo() {}
+console.log(typeOf(foo));
+//=> 'object'
+console.log(typeOf(new Buffer('')));
+//=> 'object'
+console.log(typeOf(new Map()));
+//=> 'object'
+console.log(typeOf(new Set()));
+//=> 'object'
+console.log(typeOf(new WeakMap()));
+//=> 'object'
+console.log(typeOf(new WeakSet()));
+//=> 'object'
+```
+
+## About
+
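To make the ordering argument in the Optimizations list above concrete, here is a small editorial sketch. It is not the kind-of implementation (that appears in `index.js` further down in this patch); it only illustrates settling the cheap `typeof` cases first and consulting the slower `Object.prototype.toString` tag by process of elimination, with exact-tag comparisons instead of `.slice(8, -1).toLowerCase()`.

```js
// Editorial sketch only — NOT the kind-of source. It illustrates the
// "cheap checks first, process of elimination last" ordering.
var toString = Object.prototype.toString;

function roughKindOf(val) {
  // 1. Handle the cheapest, most common cases first.
  if (val === undefined) return 'undefined';
  if (val === null) return 'null';
  var type = typeof val;
  if (type !== 'object') return type; // string, number, boolean, function, symbol...

  // 2. Only fall back to the slower toString tag by process of elimination.
  if (Array.isArray(val)) return 'array';
  var tag = toString.call(val);
  if (tag === '[object RegExp]') return 'regexp'; // exact tag, no slice/toLowerCase
  if (tag === '[object Date]') return 'date';
  return 'object';
}

console.log(roughKindOf('abc'));     //=> 'string'
console.log(roughKindOf([1, 2, 3])); //=> 'array'
console.log(roughKindOf(/foo/));     //=> 'regexp'
```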
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+ +
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+ +
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
+ +### Related projects + +You might also be interested in these projects: + +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [is-number](https://www.npmjs.com/package/is-number): Returns true if the value is a number. comprehensive tests. | [homepage](https://github.com/jonschlinkert/is-number "Returns true if the value is a number. comprehensive tests.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 98 | [jonschlinkert](https://github.com/jonschlinkert) | +| 3 | [aretecode](https://github.com/aretecode) | +| 2 | [miguelmota](https://github.com/miguelmota) | +| 1 | [dtothefp](https://github.com/dtothefp) | +| 1 | [ianstormtaylor](https://github.com/ianstormtaylor) | +| 1 | [ksheedlo](https://github.com/ksheedlo) | +| 1 | [pdehaan](https://github.com/pdehaan) | +| 1 | [laggingreflex](https://github.com/laggingreflex) | +| 1 | [charlike-old](https://github.com/charlike-old) | + +### Author + +**Jon Schlinkert** + +* [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on December 01, 2017._ \ No newline at end of file diff --git a/node_modules/is-descriptor/node_modules/kind-of/index.js b/node_modules/is-descriptor/node_modules/kind-of/index.js new file mode 100644 index 00000000..aa2bb394 --- /dev/null +++ b/node_modules/is-descriptor/node_modules/kind-of/index.js @@ -0,0 +1,129 @@ +var toString = Object.prototype.toString; + +module.exports = function kindOf(val) { + if (val === void 0) return 'undefined'; + if (val === null) return 'null'; + + var type = typeof val; + if (type === 'boolean') return 'boolean'; + if (type === 'string') return 'string'; + if (type === 'number') return 'number'; + if (type === 'symbol') return 'symbol'; + if (type === 'function') { + return isGeneratorFn(val) ? 
'generatorfunction' : 'function'; + } + + if (isArray(val)) return 'array'; + if (isBuffer(val)) return 'buffer'; + if (isArguments(val)) return 'arguments'; + if (isDate(val)) return 'date'; + if (isError(val)) return 'error'; + if (isRegexp(val)) return 'regexp'; + + switch (ctorName(val)) { + case 'Symbol': return 'symbol'; + case 'Promise': return 'promise'; + + // Set, Map, WeakSet, WeakMap + case 'WeakMap': return 'weakmap'; + case 'WeakSet': return 'weakset'; + case 'Map': return 'map'; + case 'Set': return 'set'; + + // 8-bit typed arrays + case 'Int8Array': return 'int8array'; + case 'Uint8Array': return 'uint8array'; + case 'Uint8ClampedArray': return 'uint8clampedarray'; + + // 16-bit typed arrays + case 'Int16Array': return 'int16array'; + case 'Uint16Array': return 'uint16array'; + + // 32-bit typed arrays + case 'Int32Array': return 'int32array'; + case 'Uint32Array': return 'uint32array'; + case 'Float32Array': return 'float32array'; + case 'Float64Array': return 'float64array'; + } + + if (isGeneratorObj(val)) { + return 'generator'; + } + + // Non-plain objects + type = toString.call(val); + switch (type) { + case '[object Object]': return 'object'; + // iterators + case '[object Map Iterator]': return 'mapiterator'; + case '[object Set Iterator]': return 'setiterator'; + case '[object String Iterator]': return 'stringiterator'; + case '[object Array Iterator]': return 'arrayiterator'; + } + + // other + return type.slice(8, -1).toLowerCase().replace(/\s/g, ''); +}; + +function ctorName(val) { + return val.constructor ? val.constructor.name : null; +} + +function isArray(val) { + if (Array.isArray) return Array.isArray(val); + return val instanceof Array; +} + +function isError(val) { + return val instanceof Error || (typeof val.message === 'string' && val.constructor && typeof val.constructor.stackTraceLimit === 'number'); +} + +function isDate(val) { + if (val instanceof Date) return true; + return typeof val.toDateString === 'function' + && typeof val.getDate === 'function' + && typeof val.setDate === 'function'; +} + +function isRegexp(val) { + if (val instanceof RegExp) return true; + return typeof val.flags === 'string' + && typeof val.ignoreCase === 'boolean' + && typeof val.multiline === 'boolean' + && typeof val.global === 'boolean'; +} + +function isGeneratorFn(name, val) { + return ctorName(name) === 'GeneratorFunction'; +} + +function isGeneratorObj(val) { + return typeof val.throw === 'function' + && typeof val.return === 'function' + && typeof val.next === 'function'; +} + +function isArguments(val) { + try { + if (typeof val.length === 'number' && typeof val.callee === 'function') { + return true; + } + } catch (err) { + if (err.message.indexOf('callee') !== -1) { + return true; + } + } + return false; +} + +/** + * If you need to support Safari 5-7 (8-10 yr-old browser), + * take a look at https://github.com/feross/is-buffer + */ + +function isBuffer(val) { + if (val.constructor && typeof val.constructor.isBuffer === 'function') { + return val.constructor.isBuffer(val); + } + return false; +} diff --git a/node_modules/is-descriptor/node_modules/kind-of/package.json b/node_modules/is-descriptor/node_modules/kind-of/package.json new file mode 100644 index 00000000..73d70aee --- /dev/null +++ b/node_modules/is-descriptor/node_modules/kind-of/package.json @@ -0,0 +1,88 @@ +{ + "name": "kind-of", + "description": "Get the native type of a value.", + "version": "6.0.2", + "homepage": "https://github.com/jonschlinkert/kind-of", + "author": "Jon Schlinkert 
(https://github.com/jonschlinkert)", + "contributors": [ + "David Fox-Powell (https://dtothefp.github.io/me)", + "James (https://twitter.com/aretecode)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Ken Sheedlo (kensheedlo.com)", + "laggingreflex (https://github.com/laggingreflex)", + "Miguel Mota (https://miguelmota.com)", + "Peter deHaan (http://about.me/peterdehaan)", + "tunnckoCore (https://i.am.charlike.online)" + ], + "repository": "jonschlinkert/kind-of", + "bugs": { + "url": "https://github.com/jonschlinkert/kind-of/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha", + "prepublish": "browserify -o browser.js -e index.js -s index --bare" + }, + "devDependencies": { + "benchmarked": "^2.0.0", + "browserify": "^14.4.0", + "gulp-format-md": "^1.0.0", + "mocha": "^4.0.1", + "write": "^1.0.3" + }, + "keywords": [ + "arguments", + "array", + "boolean", + "check", + "date", + "function", + "is", + "is-type", + "is-type-of", + "kind", + "kind-of", + "number", + "object", + "of", + "regexp", + "string", + "test", + "type", + "type-of", + "typeof", + "types" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "is-glob", + "is-number", + "is-primitive" + ] + }, + "reflinks": [ + "type-of", + "typeof", + "verb" + ] + } +} diff --git a/node_modules/is-descriptor/package.json b/node_modules/is-descriptor/package.json new file mode 100644 index 00000000..87b2975b --- /dev/null +++ b/node_modules/is-descriptor/package.json @@ -0,0 +1,75 @@ +{ + "name": "is-descriptor", + "description": "Returns true if a value has the characteristics of a valid JavaScript descriptor. Works for data descriptors and accessor descriptors.", + "version": "1.0.2", + "homepage": "https://github.com/jonschlinkert/is-descriptor", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "(https://github.com/wtgtybhertgeghgtwtg)" + ], + "repository": "jonschlinkert/is-descriptor", + "bugs": { + "url": "https://github.com/jonschlinkert/is-descriptor/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "accessor", + "check", + "data", + "descriptor", + "get", + "getter", + "is", + "keys", + "object", + "properties", + "property", + "set", + "setter", + "type", + "valid", + "value" + ], + "verb": { + "related": { + "list": [ + "is-accessor-descriptor", + "is-data-descriptor", + "is-descriptor", + "isobject" + ] + }, + "plugins": [ + "gulp-format-md" + ], + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/is-extendable/LICENSE b/node_modules/is-extendable/LICENSE new file mode 100644 index 00000000..65f90aca --- /dev/null +++ b/node_modules/is-extendable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-extendable/README.md b/node_modules/is-extendable/README.md new file mode 100644 index 00000000..e4cfaebc --- /dev/null +++ b/node_modules/is-extendable/README.md @@ -0,0 +1,72 @@ +# is-extendable [![NPM version](https://badge.fury.io/js/is-extendable.svg)](http://badge.fury.io/js/is-extendable) + +> Returns true if a value is any of the object types: array, regexp, plain object, function or date. This is useful for determining if a value can be extended, e.g. "can the value have keys?" + +## Install + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i is-extendable --save +``` + +## Usage + +```js +var isExtendable = require('is-extendable'); +``` + +Returns true if the value is any of the following: + +* `array` +* `regexp` +* `plain object` +* `function` +* `date` +* `error` + +## Notes + +All objects in JavaScript can have keys, but it's a pain to check for this, since we ether need to verify that the value is not `null` or `undefined` and: + +* the value is not a primitive, or +* that the object is an `object`, `function` + +Also note that an `extendable` object is not the same as an [extensible object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/isExtensible), which is one that (in es6) is not sealed, frozen, or marked as non-extensible using `preventExtensions`. + +## Related projects + +* [assign-deep](https://github.com/jonschlinkert/assign-deep): Deeply assign the enumerable properties of source objects to a destination object. +* [extend-shallow](https://github.com/jonschlinkert/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. +* [isobject](https://github.com/jonschlinkert/isobject): Returns true if the value is an object and not an array or null. +* [is-plain-object](https://github.com/jonschlinkert/is-plain-object): Returns true if an object was created by the `Object` constructor. +* [is-equal-shallow](https://github.com/jonschlinkert/is-equal-shallow): Does a shallow comparison of two objects, returning false if the keys or values differ. +* [kind-of](https://github.com/jonschlinkert/kind-of): Get the native type of a value. + +## Running tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. 
For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/is-extendable/issues/new) + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on July 04, 2015._ \ No newline at end of file diff --git a/node_modules/is-extendable/index.js b/node_modules/is-extendable/index.js new file mode 100644 index 00000000..4ee71a44 --- /dev/null +++ b/node_modules/is-extendable/index.js @@ -0,0 +1,13 @@ +/*! + * is-extendable + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +module.exports = function isExtendable(val) { + return typeof val !== 'undefined' && val !== null + && (typeof val === 'object' || typeof val === 'function'); +}; diff --git a/node_modules/is-extendable/package.json b/node_modules/is-extendable/package.json new file mode 100644 index 00000000..5dd006ea --- /dev/null +++ b/node_modules/is-extendable/package.json @@ -0,0 +1,51 @@ +{ + "name": "is-extendable", + "description": "Returns true if a value is any of the object types: array, regexp, plain object, function or date. This is useful for determining if a value can be extended, e.g. \"can the value have keys?\"", + "version": "0.1.1", + "homepage": "https://github.com/jonschlinkert/is-extendable", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extendable", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extendable/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "mocha": "*" + }, + "keywords": [ + "array", + "assign", + "check", + "date", + "extend", + "extensible", + "function", + "is", + "object", + "regex", + "test" + ], + "verbiage": { + "related": { + "list": [ + "isobject", + "is-plain-object", + "kind-of", + "is-extendable", + "is-equal-shallow", + "extend-shallow", + "assign-deep" + ] + } + } +} diff --git a/node_modules/is-extglob/LICENSE b/node_modules/is-extglob/LICENSE new file mode 100644 index 00000000..842218cf --- /dev/null +++ b/node_modules/is-extglob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
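As a quick usage sketch of the `is-extendable` export added a little earlier in this patch (the sample values below are editorial, not from the package's own tests): it answers "can this value have keys assigned to it?", so objects, arrays, dates and functions come back `true`, while primitives and `null` come back `false`.

```js
var isExtendable = require('is-extendable');

console.log(isExtendable({}));             //=> true  (plain object)
console.log(isExtendable([]));             //=> true  (arrays are objects)
console.log(isExtendable(function () {})); //=> true
console.log(isExtendable(new Date()));     //=> true
console.log(isExtendable('foo'));          //=> false (primitive string)
console.log(isExtendable(42));             //=> false
console.log(isExtendable(null));           //=> false
```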
diff --git a/node_modules/is-extglob/README.md b/node_modules/is-extglob/README.md new file mode 100644 index 00000000..0416af5c --- /dev/null +++ b/node_modules/is-extglob/README.md @@ -0,0 +1,107 @@ +# is-extglob [![NPM version](https://img.shields.io/npm/v/is-extglob.svg?style=flat)](https://www.npmjs.com/package/is-extglob) [![NPM downloads](https://img.shields.io/npm/dm/is-extglob.svg?style=flat)](https://npmjs.org/package/is-extglob) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-extglob.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-extglob) + +> Returns true if a string has an extglob. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-extglob +``` + +## Usage + +```js +var isExtglob = require('is-extglob'); +``` + +**True** + +```js +isExtglob('?(abc)'); +isExtglob('@(abc)'); +isExtglob('!(abc)'); +isExtglob('*(abc)'); +isExtglob('+(abc)'); +``` + +**False** + +Escaped extglobs: + +```js +isExtglob('\\?(abc)'); +isExtglob('\\@(abc)'); +isExtglob('\\!(abc)'); +isExtglob('\\*(abc)'); +isExtglob('\\+(abc)'); +``` + +Everything else... + +```js +isExtglob('foo.js'); +isExtglob('!foo.js'); +isExtglob('*.js'); +isExtglob('**/abc.js'); +isExtglob('abc/*.js'); +isExtglob('abc/(aaa|bbb).js'); +isExtglob('abc/[a-z].js'); +isExtglob('abc/{a,b}.js'); +isExtglob('abc/?.js'); +isExtglob('abc.js'); +isExtglob('abc/def/ghi.js'); +``` + +## History + +**v2.0** + +Adds support for escaping. Escaped exglobs no longer return true. + +## About + +### Related projects + +* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob "Returns `true` if an array has a glob pattern.") +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/jonschlinkert/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ + +To generate the readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install -g verb verb-generate-readme && verb +``` + +### Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +### License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). 
+Released under the [MIT license](https://github.com/jonschlinkert/is-extglob/blob/master/LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.31, on October 12, 2016._ \ No newline at end of file diff --git a/node_modules/is-extglob/index.js b/node_modules/is-extglob/index.js new file mode 100644 index 00000000..c1d986fc --- /dev/null +++ b/node_modules/is-extglob/index.js @@ -0,0 +1,20 @@ +/*! + * is-extglob + * + * Copyright (c) 2014-2016, Jon Schlinkert. + * Licensed under the MIT License. + */ + +module.exports = function isExtglob(str) { + if (typeof str !== 'string' || str === '') { + return false; + } + + var match; + while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) { + if (match[2]) return true; + str = str.slice(match.index + match[0].length); + } + + return false; +}; diff --git a/node_modules/is-extglob/package.json b/node_modules/is-extglob/package.json new file mode 100644 index 00000000..7a908369 --- /dev/null +++ b/node_modules/is-extglob/package.json @@ -0,0 +1,69 @@ +{ + "name": "is-extglob", + "description": "Returns true if a string has an extglob.", + "version": "2.1.1", + "homepage": "https://github.com/jonschlinkert/is-extglob", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extglob", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extglob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^0.1.10", + "mocha": "^3.0.2" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "expression", + "extglob", + "glob", + "globbing", + "globstar", + "is", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "has-glob", + "is-glob", + "micromatch" + ] + }, + "reflinks": [ + "verb", + "verb-generate-readme" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/is-fullwidth-code-point/index.js b/node_modules/is-fullwidth-code-point/index.js new file mode 100644 index 00000000..d506327c --- /dev/null +++ b/node_modules/is-fullwidth-code-point/index.js @@ -0,0 +1,46 @@ +'use strict'; +/* eslint-disable yoda */ +module.exports = x => { + if (Number.isNaN(x)) { + return false; + } + + // code points are derived from: + // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt + if ( + x >= 0x1100 && ( + x <= 0x115f || // Hangul Jamo + x === 0x2329 || // LEFT-POINTING ANGLE BRACKET + x === 0x232a || // RIGHT-POINTING ANGLE BRACKET + // CJK Radicals Supplement .. Enclosed CJK Letters and Months + (0x2e80 <= x && x <= 0x3247 && x !== 0x303f) || + // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A + (0x3250 <= x && x <= 0x4dbf) || + // CJK Unified Ideographs .. Yi Radicals + (0x4e00 <= x && x <= 0xa4c6) || + // Hangul Jamo Extended-A + (0xa960 <= x && x <= 0xa97c) || + // Hangul Syllables + (0xac00 <= x && x <= 0xd7a3) || + // CJK Compatibility Ideographs + (0xf900 <= x && x <= 0xfaff) || + // Vertical Forms + (0xfe10 <= x && x <= 0xfe19) || + // CJK Compatibility Forms .. 
Small Form Variants + (0xfe30 <= x && x <= 0xfe6b) || + // Halfwidth and Fullwidth Forms + (0xff01 <= x && x <= 0xff60) || + (0xffe0 <= x && x <= 0xffe6) || + // Kana Supplement + (0x1b000 <= x && x <= 0x1b001) || + // Enclosed Ideographic Supplement + (0x1f200 <= x && x <= 0x1f251) || + // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane + (0x20000 <= x && x <= 0x3fffd) + ) + ) { + return true; + } + + return false; +}; diff --git a/node_modules/is-fullwidth-code-point/license b/node_modules/is-fullwidth-code-point/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/is-fullwidth-code-point/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
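A common reason to need the code-point check implemented in the `index.js` above is estimating how many terminal columns a string occupies. The sketch below is an editorial illustration of that pattern (it is not part of the package, and it ignores combining characters and ANSI escapes); it assumes only the `is-fullwidth-code-point` export.

```js
const isFullwidthCodePoint = require('is-fullwidth-code-point');

// Rough terminal-column width: fullwidth characters count as 2 columns.
function displayWidth(str) {
	let width = 0;
	for (const char of str) { // iterates by code point, not UTF-16 unit
		width += isFullwidthCodePoint(char.codePointAt(0)) ? 2 : 1;
	}
	return width;
}

console.log(displayWidth('abc'));    //=> 3
console.log(displayWidth('古池や')); //=> 6 (each CJK character is fullwidth)
```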
diff --git a/node_modules/is-fullwidth-code-point/package.json b/node_modules/is-fullwidth-code-point/package.json new file mode 100644 index 00000000..3049d9e0 --- /dev/null +++ b/node_modules/is-fullwidth-code-point/package.json @@ -0,0 +1,45 @@ +{ + "name": "is-fullwidth-code-point", + "version": "2.0.0", + "description": "Check if the character represented by a given Unicode code point is fullwidth", + "license": "MIT", + "repository": "sindresorhus/is-fullwidth-code-point", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "fullwidth", + "full-width", + "full", + "width", + "unicode", + "character", + "char", + "string", + "str", + "codepoint", + "code", + "point", + "is", + "detect", + "check" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + }, + "xo": { + "esnext": true + } +} diff --git a/node_modules/is-fullwidth-code-point/readme.md b/node_modules/is-fullwidth-code-point/readme.md new file mode 100644 index 00000000..093b0281 --- /dev/null +++ b/node_modules/is-fullwidth-code-point/readme.md @@ -0,0 +1,39 @@ +# is-fullwidth-code-point [![Build Status](https://travis-ci.org/sindresorhus/is-fullwidth-code-point.svg?branch=master)](https://travis-ci.org/sindresorhus/is-fullwidth-code-point) + +> Check if the character represented by a given [Unicode code point](https://en.wikipedia.org/wiki/Code_point) is [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) + + +## Install + +``` +$ npm install --save is-fullwidth-code-point +``` + + +## Usage + +```js +const isFullwidthCodePoint = require('is-fullwidth-code-point'); + +isFullwidthCodePoint('谢'.codePointAt()); +//=> true + +isFullwidthCodePoint('a'.codePointAt()); +//=> false +``` + + +## API + +### isFullwidthCodePoint(input) + +#### input + +Type: `number` + +[Code point](https://en.wikipedia.org/wiki/Code_point) of a character. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/is-glob/LICENSE b/node_modules/is-glob/LICENSE new file mode 100644 index 00000000..3f2eca18 --- /dev/null +++ b/node_modules/is-glob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-glob/README.md b/node_modules/is-glob/README.md new file mode 100644 index 00000000..59444ebe --- /dev/null +++ b/node_modules/is-glob/README.md @@ -0,0 +1,206 @@ +# is-glob [![NPM version](https://img.shields.io/npm/v/is-glob.svg?style=flat)](https://www.npmjs.com/package/is-glob) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![NPM total downloads](https://img.shields.io/npm/dt/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![Linux Build Status](https://img.shields.io/travis/micromatch/is-glob.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/is-glob) [![Windows Build Status](https://img.shields.io/appveyor/ci/micromatch/is-glob.svg?style=flat&label=AppVeyor)](https://ci.appveyor.com/project/micromatch/is-glob) + +> Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-glob +``` + +You might also be interested in [is-valid-glob](https://github.com/jonschlinkert/is-valid-glob) and [has-glob](https://github.com/jonschlinkert/has-glob). + +## Usage + +```js +var isGlob = require('is-glob'); +``` + +### Default behavior + +**True** + +Patterns that have glob characters or regex patterns will return `true`: + +```js +isGlob('!foo.js'); +isGlob('*.js'); +isGlob('**/abc.js'); +isGlob('abc/*.js'); +isGlob('abc/(aaa|bbb).js'); +isGlob('abc/[a-z].js'); +isGlob('abc/{a,b}.js'); +//=> true +``` + +Extglobs + +```js +isGlob('abc/@(a).js'); +isGlob('abc/!(a).js'); +isGlob('abc/+(a).js'); +isGlob('abc/*(a).js'); +isGlob('abc/?(a).js'); +//=> true +``` + +**False** + +Escaped globs or extglobs return `false`: + +```js +isGlob('abc/\\@(a).js'); +isGlob('abc/\\!(a).js'); +isGlob('abc/\\+(a).js'); +isGlob('abc/\\*(a).js'); +isGlob('abc/\\?(a).js'); +isGlob('\\!foo.js'); +isGlob('\\*.js'); +isGlob('\\*\\*/abc.js'); +isGlob('abc/\\*.js'); +isGlob('abc/\\(aaa|bbb).js'); +isGlob('abc/\\[a-z].js'); +isGlob('abc/\\{a,b}.js'); +//=> false +``` + +Patterns that do not have glob patterns return `false`: + +```js +isGlob('abc.js'); +isGlob('abc/def/ghi.js'); +isGlob('foo.js'); +isGlob('abc/@.js'); +isGlob('abc/+.js'); +isGlob('abc/?.js'); +isGlob(); +isGlob(null); +//=> false +``` + +Arrays are also `false` (If you want to check if an array has a glob pattern, use [has-glob](https://github.com/jonschlinkert/has-glob)): + +```js +isGlob(['**/*.js']); +isGlob(['foo.js']); +//=> false +``` + +### Option strict + +When `options.strict === false` the behavior is less strict in determining if a pattern is a glob. Meaning that +some patterns that would return `false` may return `true`. This is done so that matching libraries like [micromatch](https://github.com/micromatch/micromatch) have a chance at determining if the pattern is a glob or not. 
+ +**True** + +Patterns that have glob characters or regex patterns will return `true`: + +```js +isGlob('!foo.js', {strict: false}); +isGlob('*.js', {strict: false}); +isGlob('**/abc.js', {strict: false}); +isGlob('abc/*.js', {strict: false}); +isGlob('abc/(aaa|bbb).js', {strict: false}); +isGlob('abc/[a-z].js', {strict: false}); +isGlob('abc/{a,b}.js', {strict: false}); +//=> true +``` + +Extglobs + +```js +isGlob('abc/@(a).js', {strict: false}); +isGlob('abc/!(a).js', {strict: false}); +isGlob('abc/+(a).js', {strict: false}); +isGlob('abc/*(a).js', {strict: false}); +isGlob('abc/?(a).js', {strict: false}); +//=> true +``` + +**False** + +Escaped globs or extglobs return `false`: + +```js +isGlob('\\!foo.js', {strict: false}); +isGlob('\\*.js', {strict: false}); +isGlob('\\*\\*/abc.js', {strict: false}); +isGlob('abc/\\*.js', {strict: false}); +isGlob('abc/\\(aaa|bbb).js', {strict: false}); +isGlob('abc/\\[a-z].js', {strict: false}); +isGlob('abc/\\{a,b}.js', {strict: false}); +//=> false +``` + +## About + +
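The escaping rules above exist so callers can cheaply decide whether a string needs a real glob engine at all. The following editorial sketch shows that pattern; the `glob` package used here is an assumption standing in for any matcher (node-glob, micromatch, …) and is not a dependency of is-glob.

```js
var fs = require('fs');
var isGlob = require('is-glob');

// Hypothetical helper: resolve `pattern` to file paths, but only reach for a
// glob engine when the input actually looks like a glob pattern.
function resolveFiles(pattern, cb) {
  if (!isGlob(pattern)) {
    // Plain path: a single access check is enough.
    return fs.access(pattern, function (err) {
      cb(err, err ? [] : [pattern]);
    });
  }
  // Glob pattern: hand off to a matcher assumed to expose a (pattern, callback) API.
  require('glob')(pattern, cb);
}

resolveFiles('package.json', console.log); // no glob engine involved
resolveFiles('src/**/*.js', console.log);  // delegated to the glob library
```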
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+ +
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+ +
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
+ +### Related projects + +You might also be interested in these projects: + +* [assemble](https://www.npmjs.com/package/assemble): Get the rocks out of your socks! Assemble makes you fast at creating web projects… [more](https://github.com/assemble/assemble) | [homepage](https://github.com/assemble/assemble "Get the rocks out of your socks! Assemble makes you fast at creating web projects. Assemble is used by thousands of projects for rapid prototyping, creating themes, scaffolds, boilerplates, e-books, UI components, API documentation, blogs, building websit") +* [base](https://www.npmjs.com/package/base): Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks | [homepage](https://github.com/node-base/base "Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks") +* [update](https://www.npmjs.com/package/update): Be scalable! Update is a new, open source developer framework and CLI for automating updates… [more](https://github.com/update/update) | [homepage](https://github.com/update/update "Be scalable! Update is a new, open source developer framework and CLI for automating updates of any kind in code projects.") +* [verb](https://www.npmjs.com/package/verb): Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used… [more](https://github.com/verbose/verb) | [homepage](https://github.com/verbose/verb "Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used on hundreds of projects of all sizes to generate everything from API docs to readmes.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 47 | [jonschlinkert](https://github.com/jonschlinkert) | +| 5 | [doowb](https://github.com/doowb) | +| 1 | [phated](https://github.com/phated) | +| 1 | [danhper](https://github.com/danhper) | +| 1 | [paulmillr](https://github.com/paulmillr) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on March 27, 2019._ \ No newline at end of file diff --git a/node_modules/is-glob/index.js b/node_modules/is-glob/index.js new file mode 100644 index 00000000..55826510 --- /dev/null +++ b/node_modules/is-glob/index.js @@ -0,0 +1,48 @@ +/*! + * is-glob + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +var isExtglob = require('is-extglob'); +var chars = { '{': '}', '(': ')', '[': ']'}; +var strictRegex = /\\(.)|(^!|\*|[\].+)]\?|\[[^\\\]]+\]|\{[^\\}]+\}|\(\?[:!=][^\\)]+\)|\([^|]+\|[^\\)]+\))/; +var relaxedRegex = /\\(.)|(^!|[*?{}()[\]]|\(\?)/; + +module.exports = function isGlob(str, options) { + if (typeof str !== 'string' || str === '') { + return false; + } + + if (isExtglob(str)) { + return true; + } + + var regex = strictRegex; + var match; + + // optionally relax regex + if (options && options.strict === false) { + regex = relaxedRegex; + } + + while ((match = regex.exec(str))) { + if (match[2]) return true; + var idx = match.index + match[0].length; + + // if an open bracket/brace/paren is escaped, + // set the index to the next closing character + var open = match[1]; + var close = open ? chars[open] : null; + if (open && close) { + var n = str.indexOf(close, idx); + if (n !== -1) { + idx = n + 1; + } + } + + str = str.slice(idx); + } + return false; +}; diff --git a/node_modules/is-glob/package.json b/node_modules/is-glob/package.json new file mode 100644 index 00000000..806000db --- /dev/null +++ b/node_modules/is-glob/package.json @@ -0,0 +1,81 @@ +{ + "name": "is-glob", + "description": "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.", + "version": "4.0.1", + "homepage": "https://github.com/micromatch/is-glob", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Daniel Perez (https://tuvistavie.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/is-glob", + "bugs": { + "url": "https://github.com/micromatch/is-glob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "devDependencies": { + "gulp-format-md": "^0.1.10", + "mocha": "^3.0.2" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "expression", + "extglob", + "glob", + "globbing", + "globstar", + "is", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ], + "verb": { + "layout": "default", + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "assemble", + "base", + "update", + "verb" + ] + }, + "reflinks": [ + "assemble", + "bach", + "base", + "composer", + "gulp", + "has-glob", + "is-valid-glob", + "micromatch", + "npm", + "scaffold", + "verb", + "vinyl" + ] + } +} diff --git a/node_modules/is-installed-globally/index.js b/node_modules/is-installed-globally/index.js new file mode 100644 index 00000000..5092c2b2 --- /dev/null +++ b/node_modules/is-installed-globally/index.js @@ -0,0 +1,5 @@ +'use strict'; +const globalDirs = require('global-dirs'); +const isPathInside = require('is-path-inside'); + +module.exports = isPathInside(__dirname, globalDirs.yarn.packages) || isPathInside(__dirname, globalDirs.npm.packages); diff --git a/node_modules/is-installed-globally/license b/node_modules/is-installed-globally/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/is-installed-globally/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of 
charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/is-installed-globally/package.json b/node_modules/is-installed-globally/package.json new file mode 100644 index 00000000..21b7a651 --- /dev/null +++ b/node_modules/is-installed-globally/package.json @@ -0,0 +1,48 @@ +{ + "name": "is-installed-globally", + "version": "0.1.0", + "description": "Check if your package was installed globally", + "license": "MIT", + "repository": "sindresorhus/is-installed-globally", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "global", + "package", + "globally", + "module", + "install", + "installed", + "npm", + "yarn", + "is", + "check", + "detect", + "local", + "locally", + "cli", + "bin", + "binary" + ], + "dependencies": { + "global-dirs": "^0.1.0", + "is-path-inside": "^1.0.0" + }, + "devDependencies": { + "ava": "*", + "execa": "^0.7.0", + "xo": "*" + } +} diff --git a/node_modules/is-installed-globally/readme.md b/node_modules/is-installed-globally/readme.md new file mode 100644 index 00000000..f3c93386 --- /dev/null +++ b/node_modules/is-installed-globally/readme.md @@ -0,0 +1,39 @@ +# is-installed-globally [![Build Status](https://travis-ci.org/sindresorhus/is-installed-globally.svg?branch=master)](https://travis-ci.org/sindresorhus/is-installed-globally) + +> Check if your package was installed globally + +Can be useful if your CLI needs different behavior when installed globally and locally. 
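As an editorial illustration of the "different behavior" mentioned above (this snippet is not from the package, and `your-cli` is a placeholder name), a CLI might use the exported boolean to print the matching update command:

```js
const isInstalledGlobally = require('is-installed-globally');

const pkgName = 'your-cli'; // hypothetical package name
const hint = isInstalledGlobally
	? `Update with: npm install --global ${pkgName}`
	: `Update with: npm install --save-dev ${pkgName}`;

console.log(hint);
```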
+ + +## Install + +``` +$ npm install is-installed-globally +``` + + +## Usage + +```js +const isInstalledGlobally = require('is-installed-globally'); + +// With `npm install your-package` +console.log(isInstalledGlobally); +//=> false + +// With `npm install --global your-package` +console.log(isInstalledGlobally); +//=> true +``` + + +## Related + +- [import-global](https://github.com/sindresorhus/import-global) - Import a globally installed module +- [resolve-global](https://github.com/sindresorhus/resolve-global) - Resolve the path of a globally installed module +- [global-dirs](https://github.com/sindresorhus/global-dirs) - Get the directory of globally installed packages and binaries + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/is-npm/index.js b/node_modules/is-npm/index.js new file mode 100644 index 00000000..b5f3c27a --- /dev/null +++ b/node_modules/is-npm/index.js @@ -0,0 +1,4 @@ +'use strict'; +module.exports = 'npm_config_username' in process.env || + 'npm_package_name' in process.env || + 'npm_config_heading' in process.env; diff --git a/node_modules/is-npm/package.json b/node_modules/is-npm/package.json new file mode 100644 index 00000000..5c6f3800 --- /dev/null +++ b/node_modules/is-npm/package.json @@ -0,0 +1,32 @@ +{ + "name": "is-npm", + "version": "1.0.0", + "description": "Check if your code is running as an npm script", + "license": "MIT", + "repository": "sindresorhus/is-npm", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "http://sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "npm", + "is", + "check", + "detect", + "env", + "environment" + ], + "devDependencies": { + "ava": "0.0.3" + } +} diff --git a/node_modules/is-npm/readme.md b/node_modules/is-npm/readme.md new file mode 100644 index 00000000..84833ec6 --- /dev/null +++ b/node_modules/is-npm/readme.md @@ -0,0 +1,30 @@ +# is-npm [![Build Status](https://travis-ci.org/sindresorhus/is-npm.svg?branch=master)](https://travis-ci.org/sindresorhus/is-npm) + +> Check if your code is running as an [npm script](https://www.npmjs.org/doc/misc/npm-scripts.html) + + +## Install + +```sh +$ npm install --save is-npm +``` + + +## Usage + +```js +var isNpm = require('is-npm'); +console.log(isNpm); +``` + +```sh +$ node foo.js +#=> false +$ npm run foo +#=> true +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/is-number/LICENSE b/node_modules/is-number/LICENSE new file mode 100644 index 00000000..842218cf --- /dev/null +++ b/node_modules/is-number/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-number/README.md b/node_modules/is-number/README.md new file mode 100644 index 00000000..281165dc --- /dev/null +++ b/node_modules/is-number/README.md @@ -0,0 +1,115 @@ +# is-number [![NPM version](https://img.shields.io/npm/v/is-number.svg?style=flat)](https://www.npmjs.com/package/is-number) [![NPM downloads](https://img.shields.io/npm/dm/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-number.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-number) + +> Returns true if the value is a number. comprehensive tests. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-number +``` + +## Usage + +To understand some of the rationale behind the decisions made in this library (and to learn about some oddities of number evaluation in JavaScript), [see this gist](https://gist.github.com/jonschlinkert/e30c70c713da325d0e81). + +```js +var isNumber = require('is-number'); +``` + +### true + +See the [tests](./test.js) for more examples. + +```js +isNumber(5e3) //=> 'true' +isNumber(0xff) //=> 'true' +isNumber(-1.1) //=> 'true' +isNumber(0) //=> 'true' +isNumber(1) //=> 'true' +isNumber(1.1) //=> 'true' +isNumber(10) //=> 'true' +isNumber(10.10) //=> 'true' +isNumber(100) //=> 'true' +isNumber('-1.1') //=> 'true' +isNumber('0') //=> 'true' +isNumber('012') //=> 'true' +isNumber('0xff') //=> 'true' +isNumber('1') //=> 'true' +isNumber('1.1') //=> 'true' +isNumber('10') //=> 'true' +isNumber('10.10') //=> 'true' +isNumber('100') //=> 'true' +isNumber('5e3') //=> 'true' +isNumber(parseInt('012')) //=> 'true' +isNumber(parseFloat('012')) //=> 'true' +``` + +### False + +See the [tests](./test.js) for more examples. + +```js +isNumber('foo') //=> 'false' +isNumber([1]) //=> 'false' +isNumber([]) //=> 'false' +isNumber(function () {}) //=> 'false' +isNumber(Infinity) //=> 'false' +isNumber(NaN) //=> 'false' +isNumber(new Array('abc')) //=> 'false' +isNumber(new Array(2)) //=> 'false' +isNumber(new Buffer('abc')) //=> 'false' +isNumber(null) //=> 'false' +isNumber(undefined) //=> 'false' +isNumber({abc: 'abc'}) //=> 'false' +``` + +## About + +### Related projects + +* [even](https://www.npmjs.com/package/even): Get the even numbered items from an array. | [homepage](https://github.com/jonschlinkert/even "Get the even numbered items from an array.") +* [is-even](https://www.npmjs.com/package/is-even): Return true if the given number is even. | [homepage](https://github.com/jonschlinkert/is-even "Return true if the given number is even.") +* [is-odd](https://www.npmjs.com/package/is-odd): Returns true if the given number is odd. | [homepage](https://github.com/jonschlinkert/is-odd "Returns true if the given number is odd.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. 
") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") +* [odd](https://www.npmjs.com/package/odd): Get the odd numbered items from an array. | [homepage](https://github.com/jonschlinkert/odd "Get the odd numbered items from an array.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ + +To generate the readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install -g verb verb-generate-readme && verb +``` + +### Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +### License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/is-number/blob/master/LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.30, on September 10, 2016._ \ No newline at end of file diff --git a/node_modules/is-number/index.js b/node_modules/is-number/index.js new file mode 100644 index 00000000..7a2a45be --- /dev/null +++ b/node_modules/is-number/index.js @@ -0,0 +1,22 @@ +/*! + * is-number + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var typeOf = require('kind-of'); + +module.exports = function isNumber(num) { + var type = typeOf(num); + + if (type === 'string') { + if (!num.trim()) return false; + } else if (type !== 'number') { + return false; + } + + return (num - num + 1) >= 0; +}; diff --git a/node_modules/is-number/package.json b/node_modules/is-number/package.json new file mode 100644 index 00000000..8c1f9ab4 --- /dev/null +++ b/node_modules/is-number/package.json @@ -0,0 +1,83 @@ +{ + "name": "is-number", + "description": "Returns true if the value is a number. 
comprehensive tests.", + "version": "3.0.0", + "homepage": "https://github.com/jonschlinkert/is-number", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Charlike Mike Reagent (http://www.tunnckocore.tk)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "jonschlinkert/is-number", + "bugs": { + "url": "https://github.com/jonschlinkert/is-number/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "kind-of": "^3.0.2" + }, + "devDependencies": { + "benchmarked": "^0.2.5", + "chalk": "^1.1.3", + "gulp-format-md": "^0.1.10", + "mocha": "^3.0.2" + }, + "keywords": [ + "check", + "coerce", + "coercion", + "integer", + "is", + "is-nan", + "is-num", + "is-number", + "istype", + "kind", + "math", + "nan", + "num", + "number", + "numeric", + "test", + "type", + "typeof", + "value" + ], + "verb": { + "related": { + "list": [ + "even", + "is-even", + "is-odd", + "is-primitive", + "kind-of", + "odd" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "reflinks": [ + "verb", + "verb-generate-readme" + ] + } +} \ No newline at end of file diff --git a/node_modules/is-obj/index.js b/node_modules/is-obj/index.js new file mode 100644 index 00000000..4d023bc6 --- /dev/null +++ b/node_modules/is-obj/index.js @@ -0,0 +1,5 @@ +'use strict'; +module.exports = function (x) { + var type = typeof x; + return x !== null && (type === 'object' || type === 'function'); +}; diff --git a/node_modules/is-obj/license b/node_modules/is-obj/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/is-obj/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
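Stepping back to the `is-number` implementation a few files up: its final check, `(num - num + 1) >= 0`, works because `num - num` coerces to `0` for any finite number or numeric string and to `NaN` for `Infinity`, `NaN` and non-numeric input, and every comparison against `NaN` is false. A small standalone sketch of that behaviour (not part of the package itself):

```js
// The coercion trick behind is-number's final check.
function check(num) {
  return (num - num + 1) >= 0;
}

console.log(check(42));       // true  (42 - 42 === 0, so 1 >= 0)
console.log(check('5e3'));    // true  ('5e3' coerces to 5000)
console.log(check(Infinity)); // false (Infinity - Infinity is NaN)
console.log(check(NaN));      // false (NaN propagates)
console.log(check('foo'));    // false ('foo' - 'foo' is NaN)
```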
diff --git a/node_modules/is-obj/package.json b/node_modules/is-obj/package.json new file mode 100644 index 00000000..c441d272 --- /dev/null +++ b/node_modules/is-obj/package.json @@ -0,0 +1,33 @@ +{ + "name": "is-obj", + "version": "1.0.1", + "description": "Check if a value is an object", + "license": "MIT", + "repository": "sindresorhus/is-obj", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "obj", + "object", + "is", + "check", + "test", + "type" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/is-obj/readme.md b/node_modules/is-obj/readme.md new file mode 100644 index 00000000..d3110264 --- /dev/null +++ b/node_modules/is-obj/readme.md @@ -0,0 +1,34 @@ +# is-obj [![Build Status](https://travis-ci.org/sindresorhus/is-obj.svg?branch=master)](https://travis-ci.org/sindresorhus/is-obj) + +> Check if a value is an object + +Keep in mind that array, function, regexp, etc, are objects in JavaScript.
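To make that note concrete: since the check is just `typeof x` being `'object'` or `'function'` with `null` excluded, functions, regular expressions and dates all count as objects here. A brief illustration (not taken from the package readme):

```js
const isObj = require('is-obj');

console.log(isObj(function () {})); // true  - functions are objects
console.log(isObj(/regexp/));       // true  - so are regular expressions
console.log(isObj(new Date()));     // true  - and dates
console.log(isObj(null));           // false - null is explicitly excluded
console.log(isObj('string'));       // false - primitives are not objects
```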
+See [`is-plain-obj`](https://github.com/sindresorhus/is-plain-obj) if you want to check for plain objects. + + +## Install + +``` +$ npm install --save is-obj +``` + + +## Usage + +```js +const isObj = require('is-obj'); + +isObj({foo: 'bar'}); +//=> true + +isObj([1, 2, 3]); +//=> true + +isObj('foo'); +//=> false +``` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/is-path-inside/index.js b/node_modules/is-path-inside/index.js new file mode 100644 index 00000000..0a4d2fd1 --- /dev/null +++ b/node_modules/is-path-inside/index.js @@ -0,0 +1,14 @@ +'use strict'; +var path = require('path'); +var pathIsInside = require('path-is-inside'); + +module.exports = function (a, b) { + a = path.resolve(a); + b = path.resolve(b); + + if (a === b) { + return false; + } + + return pathIsInside(a, b); +}; diff --git a/node_modules/is-path-inside/license b/node_modules/is-path-inside/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/is-path-inside/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
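One behaviour of the `is-path-inside` implementation above worth calling out: both arguments go through `path.resolve`, so relative paths are interpreted against the current working directory before the containment check, and a path is never considered inside itself. A small hedged illustration (the directory names are arbitrary):

```js
const isPathInside = require('is-path-inside');

// Relative inputs are resolved against process.cwd() first.
console.log(isPathInside('unicorn', '.'));
//=> true (resolves to <cwd>/unicorn vs <cwd>)

console.log(isPathInside('.', '.'));
//=> false (identical paths are not "inside" each other)
```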
diff --git a/node_modules/is-path-inside/package.json b/node_modules/is-path-inside/package.json new file mode 100644 index 00000000..4a070d27 --- /dev/null +++ b/node_modules/is-path-inside/package.json @@ -0,0 +1,37 @@ +{ + "name": "is-path-inside", + "version": "1.0.1", + "description": "Check if a path is inside another path", + "license": "MIT", + "repository": "sindresorhus/is-path-inside", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "path", + "inside", + "folder", + "directory", + "dir", + "file", + "resolve" + ], + "dependencies": { + "path-is-inside": "^1.0.1" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/is-path-inside/readme.md b/node_modules/is-path-inside/readme.md new file mode 100644 index 00000000..cc5f5162 --- /dev/null +++ b/node_modules/is-path-inside/readme.md @@ -0,0 +1,34 @@ +# is-path-inside [![Build Status](https://travis-ci.org/sindresorhus/is-path-inside.svg?branch=master)](https://travis-ci.org/sindresorhus/is-path-inside) + +> Check if a path is inside another path + + +## Install + +``` +$ npm install --save is-path-inside +``` + + +## Usage + +```js +var isPathInside = require('is-path-inside'); + +isPathInside('a/b/c', 'a/b'); +//=> true + +isPathInside('a/b/c', 'x/y'); +//=> false + +isPathInside('a/b/c', 'a/b/c'); +//=> false + +isPathInside('/Users/sindresorhus/dev/unicorn', '/Users/sindresorhus'); +//=> true +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/is-plain-object/LICENSE b/node_modules/is-plain-object/LICENSE new file mode 100644 index 00000000..3f2eca18 --- /dev/null +++ b/node_modules/is-plain-object/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-plain-object/README.md b/node_modules/is-plain-object/README.md new file mode 100644 index 00000000..1f9d0c82 --- /dev/null +++ b/node_modules/is-plain-object/README.md @@ -0,0 +1,104 @@ +# is-plain-object [![NPM version](https://img.shields.io/npm/v/is-plain-object.svg?style=flat)](https://www.npmjs.com/package/is-plain-object) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![NPM total downloads](https://img.shields.io/npm/dt/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-plain-object.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-plain-object) + +> Returns true if an object was created by the `Object` constructor. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-plain-object +``` + +Use [isobject](https://github.com/jonschlinkert/isobject) if you only want to check if the value is an object and not an array or null. + +## Usage + +```js +var isPlainObject = require('is-plain-object'); +``` + +**true** when created by the `Object` constructor. + +```js +isPlainObject(Object.create({})); +//=> true +isPlainObject(Object.create(Object.prototype)); +//=> true +isPlainObject({foo: 'bar'}); +//=> true +isPlainObject({}); +//=> true +``` + +**false** when not created by the `Object` constructor. + +```js +isPlainObject(1); +//=> false +isPlainObject(['foo', 'bar']); +//=> false +isPlainObject([]); +//=> false +isPlainObject(new Foo); +//=> false +isPlainObject(null); +//=> false +isPlainObject(Object.create(null)); +//=> false +``` + +## About + +### Related projects + +* [is-number](https://www.npmjs.com/package/is-number): Returns true if the value is a number. comprehensive tests. | [homepage](https://github.com/jonschlinkert/is-number "Returns true if the value is a number. comprehensive tests.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 17 | [jonschlinkert](https://github.com/jonschlinkert) | +| 6 | [stevenvachon](https://github.com/stevenvachon) | +| 3 | [onokumus](https://github.com/onokumus) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. 
You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on July 11, 2017._ \ No newline at end of file diff --git a/node_modules/is-plain-object/index.d.ts b/node_modules/is-plain-object/index.d.ts new file mode 100644 index 00000000..74a44e97 --- /dev/null +++ b/node_modules/is-plain-object/index.d.ts @@ -0,0 +1,5 @@ +export = isPlainObject; + +declare function isPlainObject(o: any): boolean; + +declare namespace isPlainObject {} diff --git a/node_modules/is-plain-object/index.js b/node_modules/is-plain-object/index.js new file mode 100644 index 00000000..c3284849 --- /dev/null +++ b/node_modules/is-plain-object/index.js @@ -0,0 +1,37 @@ +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +var isObject = require('isobject'); + +function isObjectObject(o) { + return isObject(o) === true + && Object.prototype.toString.call(o) === '[object Object]'; +} + +module.exports = function isPlainObject(o) { + var ctor,prot; + + if (isObjectObject(o) === false) return false; + + // If has modified constructor + ctor = o.constructor; + if (typeof ctor !== 'function') return false; + + // If has modified prototype + prot = ctor.prototype; + if (isObjectObject(prot) === false) return false; + + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } + + // Most likely a plain Object + return true; +}; diff --git a/node_modules/is-plain-object/package.json b/node_modules/is-plain-object/package.json new file mode 100644 index 00000000..dd604986 --- /dev/null +++ b/node_modules/is-plain-object/package.json @@ -0,0 +1,79 @@ +{ + "name": "is-plain-object", + "description": "Returns true if an object was created by the `Object` constructor.", + "version": "2.0.4", + "homepage": "https://github.com/jonschlinkert/is-plain-object", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Osman Nuri Okumuş (http://onokumus.com)", + "Steven Vachon (https://svachon.com)", + "(https://github.com/wtgtybhertgeghgtwtg)" + ], + "repository": "jonschlinkert/is-plain-object", + "bugs": { + "url": "https://github.com/jonschlinkert/is-plain-object/issues" + }, + "license": "MIT", + "files": [ + "index.d.ts", + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "browserify": "browserify index.js --standalone isPlainObject | uglifyjs --compress --mangle -o browser/is-plain-object.js", + "test_browser": "mocha-phantomjs test/browser.html", + "test_node": "mocha", + "test": "npm run test_node && npm run browserify && npm run test_browser" + }, + "dependencies": { + "isobject": "^3.0.1" + }, + "devDependencies": { + "browserify": "^14.4.0", + "chai": "^4.0.2", + "gulp-format-md": "^1.0.0", + "mocha": "^3.4.2", + "mocha-phantomjs": "^4.1.0", + "phantomjs": "^2.1.7", + "uglify-js": "^3.0.24" + }, + "keywords": [ + "check", + "is", + "is-object", + "isobject", + "javascript", + "kind", + 
"kind-of", + "object", + "plain", + "type", + "typeof", + "value" + ], + "types": "index.d.ts", + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "is-number", + "isobject", + "kind-of" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/is-redirect/index.js b/node_modules/is-redirect/index.js new file mode 100644 index 00000000..75ec0090 --- /dev/null +++ b/node_modules/is-redirect/index.js @@ -0,0 +1,14 @@ +'use strict'; +module.exports = function (x) { + if (typeof x !== 'number') { + throw new TypeError('Expected a number'); + } + + return x === 300 || + x === 301 || + x === 302 || + x === 303 || + x === 305 || + x === 307 || + x === 308; +}; diff --git a/node_modules/is-redirect/license b/node_modules/is-redirect/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/is-redirect/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-redirect/package.json b/node_modules/is-redirect/package.json new file mode 100644 index 00000000..b9b22856 --- /dev/null +++ b/node_modules/is-redirect/package.json @@ -0,0 +1,35 @@ +{ + "name": "is-redirect", + "version": "1.0.0", + "description": "Check if a number is a redirect HTTP status code", + "license": "MIT", + "repository": "sindresorhus/is-redirect", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "redirect", + "http", + "https", + "status", + "code", + "codes", + "is", + "check", + "detect" + ], + "devDependencies": { + "ava": "0.0.4" + } +} diff --git a/node_modules/is-redirect/readme.md b/node_modules/is-redirect/readme.md new file mode 100644 index 00000000..e9f0a393 --- /dev/null +++ b/node_modules/is-redirect/readme.md @@ -0,0 +1,28 @@ +# is-redirect [![Build Status](https://travis-ci.org/sindresorhus/is-redirect.svg?branch=master)](https://travis-ci.org/sindresorhus/is-redirect) + +> Check if a number is a [redirect HTTP status code](http://en.wikipedia.org/wiki/List_of_HTTP_status_codes#3xx_Redirection) + + +## Install + +``` +$ npm install --save is-redirect +``` + + +## Usage + +```js +var isRedirect = require('is-redirect'); + +isRedirect(302); +//=> true + +isRedirect(200); +//=> false +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/is-retry-allowed/index.js b/node_modules/is-retry-allowed/index.js new file mode 100644 index 00000000..663ee338 --- /dev/null +++ b/node_modules/is-retry-allowed/index.js @@ -0,0 +1,60 @@ +'use strict'; + +var WHITELIST = [ + 'ETIMEDOUT', + 'ECONNRESET', + 'EADDRINUSE', + 'ESOCKETTIMEDOUT', + 'ECONNREFUSED', + 'EPIPE' +]; + +var BLACKLIST = [ + 'ENOTFOUND', + 'ENETUNREACH', + + // SSL errors from https://github.com/nodejs/node/blob/ed3d8b13ee9a705d89f9e0397d9e96519e7e47ac/src/node_crypto.cc#L1950 + 'UNABLE_TO_GET_ISSUER_CERT', + 'UNABLE_TO_GET_CRL', + 'UNABLE_TO_DECRYPT_CERT_SIGNATURE', + 'UNABLE_TO_DECRYPT_CRL_SIGNATURE', + 'UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY', + 'CERT_SIGNATURE_FAILURE', + 'CRL_SIGNATURE_FAILURE', + 'CERT_NOT_YET_VALID', + 'CERT_HAS_EXPIRED', + 'CRL_NOT_YET_VALID', + 'CRL_HAS_EXPIRED', + 'ERROR_IN_CERT_NOT_BEFORE_FIELD', + 'ERROR_IN_CERT_NOT_AFTER_FIELD', + 'ERROR_IN_CRL_LAST_UPDATE_FIELD', + 'ERROR_IN_CRL_NEXT_UPDATE_FIELD', + 'OUT_OF_MEM', + 'DEPTH_ZERO_SELF_SIGNED_CERT', + 'SELF_SIGNED_CERT_IN_CHAIN', + 'UNABLE_TO_GET_ISSUER_CERT_LOCALLY', + 'UNABLE_TO_VERIFY_LEAF_SIGNATURE', + 'CERT_CHAIN_TOO_LONG', + 'CERT_REVOKED', + 'INVALID_CA', + 'PATH_LENGTH_EXCEEDED', + 'INVALID_PURPOSE', + 'CERT_UNTRUSTED', + 'CERT_REJECTED' +]; + +module.exports = function (err) { + if (!err || !err.code) { + return true; + } + + if (WHITELIST.indexOf(err.code) !== -1) { + return true; + } + + if (BLACKLIST.indexOf(err.code) !== -1) { + return false; + } + + return true; +}; diff --git a/node_modules/is-retry-allowed/license b/node_modules/is-retry-allowed/license new file mode 100644 index 00000000..1aeb74fd --- /dev/null +++ b/node_modules/is-retry-allowed/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Vsevolod Strukchinsky (github.com/floatdrop) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, 
including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-retry-allowed/package.json b/node_modules/is-retry-allowed/package.json new file mode 100644 index 00000000..452b76f5 --- /dev/null +++ b/node_modules/is-retry-allowed/package.json @@ -0,0 +1,29 @@ +{ + "name": "is-retry-allowed", + "version": "1.1.0", + "description": "My prime module", + "license": "MIT", + "repository": "floatdrop/is-retry-allowed", + "author": { + "name": "Vsevolod Strukchinsky", + "email": "floatdrop@gmail.com", + "url": "github.com/floatdrop" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "" + ], + "dependencies": {}, + "devDependencies": { + "ava": "^0.8.0", + "xo": "^0.12.1" + } +} diff --git a/node_modules/is-retry-allowed/readme.md b/node_modules/is-retry-allowed/readme.md new file mode 100644 index 00000000..4212d099 --- /dev/null +++ b/node_modules/is-retry-allowed/readme.md @@ -0,0 +1,42 @@ +# is-retry-allowed [![Build Status](https://travis-ci.org/floatdrop/is-retry-allowed.svg?branch=master)](https://travis-ci.org/floatdrop/is-retry-allowed) + +Is retry allowed for Error? + + +## Install + +``` +$ npm install --save is-retry-allowed +``` + + +## Usage + +```js +const isRetryAllowed = require('is-retry-allowed'); + +isRetryAllowed({code: 'ETIMEDOUT'}); +//=> true + +isRetryAllowed({code: 'ENOTFOUND'}); +//=> false + +isRetryAllowed({}); +//=> true +``` + + +## API + +### isRetryAllowed(error) + +#### error + +Type: `object` + +Object with `code` property, which will be used to determine retry. 
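To show how that API is typically consumed, here is a hedged sketch of a retry wrapper that gives up immediately when `isRetryAllowed` says the error is not worth retrying (for example `ENOTFOUND` or the blacklisted SSL failures) and otherwise retries a few times. The `fetchSomething` stub, the attempt count and the delay are made up for illustration.

```js
'use strict';
const isRetryAllowed = require('is-retry-allowed');

// Hypothetical flaky operation; stand-in for a real network call.
function fetchSomething(cb) {
  const err = new Error('socket hang up');
  err.code = 'ECONNRESET'; // whitelisted, so retrying is allowed
  cb(err);
}

function withRetries(fn, attempts, cb) {
  fn((err, result) => {
    if (!err) return cb(null, result);
    // Bail out right away on errors that retrying cannot fix.
    if (!isRetryAllowed(err) || attempts <= 1) return cb(err);
    setTimeout(() => withRetries(fn, attempts - 1, cb), 100);
  });
}

withRetries(fetchSomething, 3, err => {
  console.log(err ? 'gave up: ' + err.message : 'succeeded');
});
```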
+ + +## License + +MIT © [Vsevolod Strukchinsky](http://github.com/floatdrop) diff --git a/node_modules/is-stream/index.js b/node_modules/is-stream/index.js new file mode 100644 index 00000000..6f7ec91a --- /dev/null +++ b/node_modules/is-stream/index.js @@ -0,0 +1,21 @@ +'use strict'; + +var isStream = module.exports = function (stream) { + return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; +}; + +isStream.writable = function (stream) { + return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; +}; + +isStream.readable = function (stream) { + return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; +}; + +isStream.duplex = function (stream) { + return isStream.writable(stream) && isStream.readable(stream); +}; + +isStream.transform = function (stream) { + return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object'; +}; diff --git a/node_modules/is-stream/license b/node_modules/is-stream/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/is-stream/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
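Because the `is-stream` index above relies purely on duck-typing (a `pipe` method, the `_read`/`_write` implementations and their internal state objects), the sub-checks can be exercised with nothing but Node's built-in `stream` module. A hedged sketch of what they report:

```js
'use strict';
const stream = require('stream');
const isStream = require('is-stream');

const readable = new stream.Readable({ read() {} });
const writable = new stream.Writable({ write(chunk, enc, cb) { cb(); } });
const passthrough = new stream.PassThrough(); // readable and writable

console.log(isStream(readable));           // true  - has a pipe() method
console.log(isStream.readable(readable));  // true
console.log(isStream.writable(readable));  // false - no _write implementation
console.log(isStream.writable(writable));  // true
console.log(isStream.duplex(passthrough)); // true  - both readable and writable
console.log(isStream({ pipe: () => {} })); // true  - plain duck-typing on pipe
console.log(isStream({}));                 // false
```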
diff --git a/node_modules/is-stream/package.json b/node_modules/is-stream/package.json new file mode 100644 index 00000000..0308918d --- /dev/null +++ b/node_modules/is-stream/package.json @@ -0,0 +1,38 @@ +{ + "name": "is-stream", + "version": "1.1.0", + "description": "Check if something is a Node.js stream", + "license": "MIT", + "repository": "sindresorhus/is-stream", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "stream", + "type", + "streams", + "writable", + "readable", + "duplex", + "transform", + "check", + "detect", + "is" + ], + "devDependencies": { + "ava": "*", + "tempfile": "^1.1.0", + "xo": "*" + } +} diff --git a/node_modules/is-stream/readme.md b/node_modules/is-stream/readme.md new file mode 100644 index 00000000..d8afce81 --- /dev/null +++ b/node_modules/is-stream/readme.md @@ -0,0 +1,42 @@ +# is-stream [![Build Status](https://travis-ci.org/sindresorhus/is-stream.svg?branch=master)](https://travis-ci.org/sindresorhus/is-stream) + +> Check if something is a [Node.js stream](https://nodejs.org/api/stream.html) + + +## Install + +``` +$ npm install --save is-stream +``` + + +## Usage + +```js +const fs = require('fs'); +const isStream = require('is-stream'); + +isStream(fs.createReadStream('unicorn.png')); +//=> true + +isStream({}); +//=> false +``` + + +## API + +### isStream(stream) + +#### isStream.writable(stream) + +#### isStream.readable(stream) + +#### isStream.duplex(stream) + +#### isStream.transform(stream) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/is-windows/LICENSE b/node_modules/is-windows/LICENSE new file mode 100644 index 00000000..f8de0630 --- /dev/null +++ b/node_modules/is-windows/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2018, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-windows/README.md b/node_modules/is-windows/README.md new file mode 100644 index 00000000..485bfdec --- /dev/null +++ b/node_modules/is-windows/README.md @@ -0,0 +1,95 @@ +# is-windows [![NPM version](https://img.shields.io/npm/v/is-windows.svg?style=flat)](https://www.npmjs.com/package/is-windows) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-windows.svg?style=flat)](https://npmjs.org/package/is-windows) [![NPM total downloads](https://img.shields.io/npm/dt/is-windows.svg?style=flat)](https://npmjs.org/package/is-windows) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-windows.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-windows) + +> Returns true if the platform is windows. UMD module, works with node.js, commonjs, browser, AMD, electron, etc. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-windows +``` + +## Heads up! + +As of `v0.2.0` this module always returns a function. + +## Node.js usage + +```js +var isWindows = require('is-windows'); + +console.log(isWindows()); +//=> returns true if the platform is windows +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [is-absolute](https://www.npmjs.com/package/is-absolute): Returns true if a file path is absolute. Does not rely on the path module… [more](https://github.com/jonschlinkert/is-absolute) | [homepage](https://github.com/jonschlinkert/is-absolute "Returns true if a file path is absolute. Does not rely on the path module and can be used as a polyfill for node.js native `path.isAbolute`.") +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [is-relative](https://www.npmjs.com/package/is-relative): Returns `true` if the path appears to be relative. | [homepage](https://github.com/jonschlinkert/is-relative "Returns `true` if the path appears to be relative.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [window-size](https://www.npmjs.com/package/window-size): Reliable way to get the height and width of terminal/console, since it's not calculated or… [more](https://github.com/jonschlinkert/window-size) | [homepage](https://github.com/jonschlinkert/window-size "Reliable way to get the height and width of terminal/console, since it's not calculated or updated the same way on all platforms, environments and node.js versions.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 11 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [doowb](https://github.com/doowb) | +| 1 | [SimenB](https://github.com/SimenB) | +| 1 | [gucong3000](https://github.com/gucong3000) | + +### Author + +**Jon Schlinkert** + +* [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on February 14, 2018._ \ No newline at end of file diff --git a/node_modules/is-windows/index.js b/node_modules/is-windows/index.js new file mode 100644 index 00000000..55d43e09 --- /dev/null +++ b/node_modules/is-windows/index.js @@ -0,0 +1,27 @@ +/*! + * is-windows + * + * Copyright © 2015-2018, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +(function(factory) { + if (exports && typeof exports === 'object' && typeof module !== 'undefined') { + module.exports = factory(); + } else if (typeof define === 'function' && define.amd) { + define([], factory); + } else if (typeof window !== 'undefined') { + window.isWindows = factory(); + } else if (typeof global !== 'undefined') { + global.isWindows = factory(); + } else if (typeof self !== 'undefined') { + self.isWindows = factory(); + } else { + this.isWindows = factory(); + } +})(function() { + 'use strict'; + return function isWindows() { + return process && (process.platform === 'win32' || /^(msys|cygwin)$/.test(process.env.OSTYPE)); + }; +}); diff --git a/node_modules/is-windows/package.json b/node_modules/is-windows/package.json new file mode 100644 index 00000000..fca09f9c --- /dev/null +++ b/node_modules/is-windows/package.json @@ -0,0 +1,71 @@ +{ + "name": "is-windows", + "description": "Returns true if the platform is windows. UMD module, works with node.js, commonjs, browser, AMD, electron, etc.", + "version": "1.0.2", + "homepage": "https://github.com/jonschlinkert/is-windows", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Simen Bekkhus (https://github.com/SimenB)", + "刘祺 (gucong.co.cc)" + ], + "repository": "jonschlinkert/is-windows", + "bugs": { + "url": "https://github.com/jonschlinkert/is-windows/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "check", + "cywin", + "is", + "is-windows", + "nix", + "operating system", + "os", + "platform", + "process", + "unix", + "win", + "win32", + "windows" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "is-absolute", + "is-glob", + "is-relative", + "isobject", + "window-size" + ] + }, + "lint": { + "reflinks": true + }, + "reflinks": [ + "verb" + ] + } +} diff --git a/node_modules/isarray/.npmignore b/node_modules/isarray/.npmignore new file mode 100644 index 00000000..3c3629e6 --- /dev/null +++ b/node_modules/isarray/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/isarray/.travis.yml b/node_modules/isarray/.travis.yml new file mode 100644 index 00000000..cc4dba29 --- /dev/null +++ b/node_modules/isarray/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/node_modules/isarray/Makefile b/node_modules/isarray/Makefile new file mode 100644 index 00000000..787d56e1 --- /dev/null +++ b/node_modules/isarray/Makefile @@ -0,0 +1,6 @@ + +test: + @node_modules/.bin/tape test.js + +.PHONY: test + diff --git a/node_modules/isarray/README.md b/node_modules/isarray/README.md new file mode 100644 index 00000000..16d2c59c --- /dev/null +++ b/node_modules/isarray/README.md @@ -0,0 +1,60 @@ + +# isarray + +`Array#isArray` for older browsers. 
+ +[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) +[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) + +[![browser support](https://ci.testling.com/juliangruber/isarray.png) +](https://ci.testling.com/juliangruber/isarray) + +## Usage + +```js +var isArray = require('isarray'); + +console.log(isArray([])); // => true +console.log(isArray({})); // => false +``` + +## Installation + +With [npm](http://npmjs.org) do + +```bash +$ npm install isarray +``` + +Then bundle for the browser with +[browserify](https://github.com/substack/browserify). + +With [component](http://component.io) do + +```bash +$ component install juliangruber/isarray +``` + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/isarray/component.json b/node_modules/isarray/component.json new file mode 100644 index 00000000..9e31b683 --- /dev/null +++ b/node_modules/isarray/component.json @@ -0,0 +1,19 @@ +{ + "name" : "isarray", + "description" : "Array#isArray for older browsers", + "version" : "0.0.1", + "repository" : "juliangruber/isarray", + "homepage": "https://github.com/juliangruber/isarray", + "main" : "index.js", + "scripts" : [ + "index.js" + ], + "dependencies" : {}, + "keywords": ["browser","isarray","array"], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT" +} diff --git a/node_modules/isarray/index.js b/node_modules/isarray/index.js new file mode 100644 index 00000000..a57f6349 --- /dev/null +++ b/node_modules/isarray/index.js @@ -0,0 +1,5 @@ +var toString = {}.toString; + +module.exports = Array.isArray || function (arr) { + return toString.call(arr) == '[object Array]'; +}; diff --git a/node_modules/isarray/package.json b/node_modules/isarray/package.json new file mode 100644 index 00000000..1a4317a9 --- /dev/null +++ b/node_modules/isarray/package.json @@ -0,0 +1,45 @@ +{ + "name": "isarray", + "description": "Array#isArray for older browsers", + "version": "1.0.0", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/isarray.git" + }, + "homepage": "https://github.com/juliangruber/isarray", + "main": "index.js", + "dependencies": {}, + "devDependencies": { + "tape": "~2.13.4" + }, + "keywords": [ + "browser", + "isarray", + "array" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test.js", + "browsers": [ + "ie/8..latest", + "firefox/17..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + }, + "scripts": { + "test": "tape test.js" + } +} diff --git a/node_modules/isarray/test.js b/node_modules/isarray/test.js new file mode 100644 index 00000000..e0c3444d --- /dev/null +++ b/node_modules/isarray/test.js @@ -0,0 +1,20 @@ +var isArray = require('./'); +var test = require('tape'); + +test('is array', function(t){ + t.ok(isArray([])); + t.notOk(isArray({})); + t.notOk(isArray(null)); + t.notOk(isArray(false)); + + var obj = {}; + obj[0] = true; + t.notOk(isArray(obj)); + + var arr = []; + arr.foo = 'bar'; + t.ok(isArray(arr)); + + t.end(); +}); + diff --git a/node_modules/isexe/.npmignore b/node_modules/isexe/.npmignore new file mode 100644 index 00000000..c1cb757a --- /dev/null +++ b/node_modules/isexe/.npmignore @@ -0,0 +1,2 @@ +.nyc_output/ +coverage/ diff --git a/node_modules/isexe/LICENSE b/node_modules/isexe/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/isexe/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/isexe/README.md b/node_modules/isexe/README.md new file mode 100644 index 00000000..35769e84 --- /dev/null +++ b/node_modules/isexe/README.md @@ -0,0 +1,51 @@ +# isexe + +Minimal module to check if a file is executable, and a normal file. + +Uses `fs.stat` and tests against the `PATHEXT` environment variable on +Windows. + +## USAGE + +```javascript +var isexe = require('isexe') +isexe('some-file-name', function (err, isExe) { + if (err) { + console.error('probably file does not exist or something', err) + } else if (isExe) { + console.error('this thing can be run') + } else { + console.error('cannot be run') + } +}) + +// same thing but synchronous, throws errors +var isExe = isexe.sync('some-file-name') + +// treat errors as just "not executable" +isexe('maybe-missing-file', { ignoreErrors: true }, callback) +var isExe = isexe.sync('maybe-missing-file', { ignoreErrors: true }) +``` + +## API + +### `isexe(path, [options], [callback])` + +Check if the path is executable. If no callback provided, and a +global `Promise` object is available, then a Promise will be returned. + +Will raise whatever errors may be raised by `fs.stat`, unless +`options.ignoreErrors` is set to true. + +### `isexe.sync(path, [options])` + +Same as `isexe` but returns the value and throws any errors raised. + +### Options + +* `ignoreErrors` Treat all errors as "no, this is not executable", but + don't raise them. +* `uid` Number to use as the user id +* `gid` Number to use as the group id +* `pathExt` List of path extensions to use instead of `PATHEXT` + environment variable on Windows. 
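Tying those options together, here is a hedged sketch of how `pathExt` and `ignoreErrors` might be combined when checking candidate commands on Windows without trusting the machine's `PATHEXT`. The file paths are made up and will simply report as not executable where they do not exist.

```js
var isexe = require('isexe')

// Restrict the accepted extensions (only consulted on Windows) and treat
// missing files as "not executable" rather than raising ENOENT.
var opts = { pathExt: '.EXE;.CMD;.BAT', ignoreErrors: true }

isexe('C:\\tools\\build.cmd', opts, function (err, canRun) {
  console.log('async:', err, canRun) // err is null thanks to ignoreErrors
})

// Synchronous variant: returns false instead of throwing for a missing file.
console.log('sync:', isexe.sync('C:\\tools\\missing.exe', opts))
```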
diff --git a/node_modules/isexe/index.js b/node_modules/isexe/index.js new file mode 100644 index 00000000..553fb32b --- /dev/null +++ b/node_modules/isexe/index.js @@ -0,0 +1,57 @@ +var fs = require('fs') +var core +if (process.platform === 'win32' || global.TESTING_WINDOWS) { + core = require('./windows.js') +} else { + core = require('./mode.js') +} + +module.exports = isexe +isexe.sync = sync + +function isexe (path, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + if (!cb) { + if (typeof Promise !== 'function') { + throw new TypeError('callback not provided') + } + + return new Promise(function (resolve, reject) { + isexe(path, options || {}, function (er, is) { + if (er) { + reject(er) + } else { + resolve(is) + } + }) + }) + } + + core(path, options || {}, function (er, is) { + // ignore EACCES because that just means we aren't allowed to run it + if (er) { + if (er.code === 'EACCES' || options && options.ignoreErrors) { + er = null + is = false + } + } + cb(er, is) + }) +} + +function sync (path, options) { + // my kingdom for a filtered catch + try { + return core.sync(path, options || {}) + } catch (er) { + if (options && options.ignoreErrors || er.code === 'EACCES') { + return false + } else { + throw er + } + } +} diff --git a/node_modules/isexe/mode.js b/node_modules/isexe/mode.js new file mode 100644 index 00000000..1995ea4a --- /dev/null +++ b/node_modules/isexe/mode.js @@ -0,0 +1,41 @@ +module.exports = isexe +isexe.sync = sync + +var fs = require('fs') + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, options)) + }) +} + +function sync (path, options) { + return checkStat(fs.statSync(path), options) +} + +function checkStat (stat, options) { + return stat.isFile() && checkMode(stat, options) +} + +function checkMode (stat, options) { + var mod = stat.mode + var uid = stat.uid + var gid = stat.gid + + var myUid = options.uid !== undefined ? + options.uid : process.getuid && process.getuid() + var myGid = options.gid !== undefined ? + options.gid : process.getgid && process.getgid() + + var u = parseInt('100', 8) + var g = parseInt('010', 8) + var o = parseInt('001', 8) + var ug = u | g + + var ret = (mod & o) || + (mod & g) && gid === myGid || + (mod & u) && uid === myUid || + (mod & ug) && myUid === 0 + + return ret +} diff --git a/node_modules/isexe/package.json b/node_modules/isexe/package.json new file mode 100644 index 00000000..e4526894 --- /dev/null +++ b/node_modules/isexe/package.json @@ -0,0 +1,31 @@ +{ + "name": "isexe", + "version": "2.0.0", + "description": "Minimal module to check if a file is executable.", + "main": "index.js", + "directories": { + "test": "test" + }, + "devDependencies": { + "mkdirp": "^0.5.1", + "rimraf": "^2.5.0", + "tap": "^10.3.0" + }, + "scripts": { + "test": "tap test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/isexe.git" + }, + "keywords": [], + "bugs": { + "url": "https://github.com/isaacs/isexe/issues" + }, + "homepage": "https://github.com/isaacs/isexe#readme" +} diff --git a/node_modules/isexe/test/basic.js b/node_modules/isexe/test/basic.js new file mode 100644 index 00000000..d926df64 --- /dev/null +++ b/node_modules/isexe/test/basic.js @@ -0,0 +1,221 @@ +var t = require('tap') +var fs = require('fs') +var path = require('path') +var fixture = path.resolve(__dirname, 'fixtures') +var meow = fixture + '/meow.cat' +var mine = fixture + '/mine.cat' +var ours = fixture + '/ours.cat' +var fail = fixture + '/fail.false' +var noent = fixture + '/enoent.exe' +var mkdirp = require('mkdirp') +var rimraf = require('rimraf') + +var isWindows = process.platform === 'win32' +var hasAccess = typeof fs.access === 'function' +var winSkip = isWindows && 'windows' +var accessSkip = !hasAccess && 'no fs.access function' +var hasPromise = typeof Promise === 'function' +var promiseSkip = !hasPromise && 'no global Promise' + +function reset () { + delete require.cache[require.resolve('../')] + return require('../') +} + +t.test('setup fixtures', function (t) { + rimraf.sync(fixture) + mkdirp.sync(fixture) + fs.writeFileSync(meow, '#!/usr/bin/env cat\nmeow\n') + fs.chmodSync(meow, parseInt('0755', 8)) + fs.writeFileSync(fail, '#!/usr/bin/env false\n') + fs.chmodSync(fail, parseInt('0644', 8)) + fs.writeFileSync(mine, '#!/usr/bin/env cat\nmine\n') + fs.chmodSync(mine, parseInt('0744', 8)) + fs.writeFileSync(ours, '#!/usr/bin/env cat\nours\n') + fs.chmodSync(ours, parseInt('0754', 8)) + t.end() +}) + +t.test('promise', { skip: promiseSkip }, function (t) { + var isexe = reset() + t.test('meow async', function (t) { + isexe(meow).then(function (is) { + t.ok(is) + t.end() + }) + }) + t.test('fail async', function (t) { + isexe(fail).then(function (is) { + t.notOk(is) + t.end() + }) + }) + t.test('noent async', function (t) { + isexe(noent).catch(function (er) { + t.ok(er) + t.end() + }) + }) + t.test('noent ignore async', function (t) { + isexe(noent, { ignoreErrors: true }).then(function (is) { + t.notOk(is) + t.end() + }) + }) + t.end() +}) + +t.test('no promise', function (t) { + global.Promise = null + var isexe = reset() + t.throws('try to meow a promise', function () { + isexe(meow) + }) + t.end() +}) + +t.test('access', { skip: accessSkip || winSkip }, function (t) { + runTest(t) +}) + +t.test('mode', { skip: winSkip }, function (t) { + delete fs.access + delete fs.accessSync + var isexe = reset() + t.ok(isexe.sync(ours, { uid: 0, gid: 0 })) + t.ok(isexe.sync(mine, { uid: 0, gid: 0 })) + runTest(t) +}) + +t.test('windows', function (t) { + global.TESTING_WINDOWS = true + var pathExt = '.EXE;.CAT;.CMD;.COM' + t.test('pathExt option', function (t) { + runTest(t, { pathExt: '.EXE;.CAT;.CMD;.COM' }) + }) + t.test('pathExt env', function (t) { + process.env.PATHEXT = pathExt + runTest(t) + }) + t.test('no pathExt', function (t) { + // with a pathExt of '', any filename is fine. + // so the "fail" one would still pass. + runTest(t, { pathExt: '', skipFail: true }) + }) + t.test('pathext with empty entry', function (t) { + // with a pathExt of '', any filename is fine. + // so the "fail" one would still pass. 
+ runTest(t, { pathExt: ';' + pathExt, skipFail: true }) + }) + t.end() +}) + +t.test('cleanup', function (t) { + rimraf.sync(fixture) + t.end() +}) + +function runTest (t, options) { + var isexe = reset() + + var optionsIgnore = Object.create(options || {}) + optionsIgnore.ignoreErrors = true + + if (!options || !options.skipFail) { + t.notOk(isexe.sync(fail, options)) + } + t.notOk(isexe.sync(noent, optionsIgnore)) + if (!options) { + t.ok(isexe.sync(meow)) + } else { + t.ok(isexe.sync(meow, options)) + } + + t.ok(isexe.sync(mine, options)) + t.ok(isexe.sync(ours, options)) + t.throws(function () { + isexe.sync(noent, options) + }) + + t.test('meow async', function (t) { + if (!options) { + isexe(meow, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + } else { + isexe(meow, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + } + }) + + t.test('mine async', function (t) { + isexe(mine, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + }) + + t.test('ours async', function (t) { + isexe(ours, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + }) + + if (!options || !options.skipFail) { + t.test('fail async', function (t) { + isexe(fail, options, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + } + + t.test('noent async', function (t) { + isexe(noent, options, function (er, is) { + t.ok(er) + t.notOk(is) + t.end() + }) + }) + + t.test('noent ignore async', function (t) { + isexe(noent, optionsIgnore, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + + t.test('directory is not executable', function (t) { + isexe(__dirname, options, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + + t.end() +} diff --git a/node_modules/isexe/windows.js b/node_modules/isexe/windows.js new file mode 100644 index 00000000..34996734 --- /dev/null +++ b/node_modules/isexe/windows.js @@ -0,0 +1,42 @@ +module.exports = isexe +isexe.sync = sync + +var fs = require('fs') + +function checkPathExt (path, options) { + var pathext = options.pathExt !== undefined ? + options.pathExt : process.env.PATHEXT + + if (!pathext) { + return true + } + + pathext = pathext.split(';') + if (pathext.indexOf('') !== -1) { + return true + } + for (var i = 0; i < pathext.length; i++) { + var p = pathext[i].toLowerCase() + if (p && path.substr(-p.length).toLowerCase() === p) { + return true + } + } + return false +} + +function checkStat (stat, path, options) { + if (!stat.isSymbolicLink() && !stat.isFile()) { + return false + } + return checkPathExt(path, options) +} + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, path, options)) + }) +} + +function sync (path, options) { + return checkStat(fs.statSync(path), path, options) +} diff --git a/node_modules/isobject/LICENSE b/node_modules/isobject/LICENSE new file mode 100644 index 00000000..943e71d0 --- /dev/null +++ b/node_modules/isobject/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/isobject/README.md b/node_modules/isobject/README.md new file mode 100644 index 00000000..d01feaa4 --- /dev/null +++ b/node_modules/isobject/README.md @@ -0,0 +1,122 @@ +# isobject [![NPM version](https://img.shields.io/npm/v/isobject.svg?style=flat)](https://www.npmjs.com/package/isobject) [![NPM monthly downloads](https://img.shields.io/npm/dm/isobject.svg?style=flat)](https://npmjs.org/package/isobject) [![NPM total downloads](https://img.shields.io/npm/dt/isobject.svg?style=flat)](https://npmjs.org/package/isobject) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/isobject.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/isobject) + +> Returns true if the value is an object and not an array or null. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save isobject +``` + +Install with [yarn](https://yarnpkg.com): + +```sh +$ yarn add isobject +``` + +Use [is-plain-object](https://github.com/jonschlinkert/is-plain-object) if you want only objects that are created by the `Object` constructor. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install isobject +``` +Install with [bower](https://bower.io/) + +```sh +$ bower install isobject +``` + +## Usage + +```js +var isObject = require('isobject'); +``` + +**True** + +All of the following return `true`: + +```js +isObject({}); +isObject(Object.create({})); +isObject(Object.create(Object.prototype)); +isObject(Object.create(null)); +isObject({}); +isObject(new Foo); +isObject(/foo/); +``` + +**False** + +All of the following return `false`: + +```js +isObject(); +isObject(function () {}); +isObject(1); +isObject([]); +isObject(undefined); +isObject(null); +``` + +## About + +### Related projects + +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow "Extend an object with the properties of additional objects. node.js/javascript util.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. 
| [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") +* [merge-deep](https://www.npmjs.com/package/merge-deep): Recursively merge values in a javascript object. | [homepage](https://github.com/jonschlinkert/merge-deep "Recursively merge values in a javascript object.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 29 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [doowb](https://github.com/doowb) | +| 1 | [magnudae](https://github.com/magnudae) | +| 1 | [LeSuisse](https://github.com/LeSuisse) | +| 1 | [tmcw](https://github.com/tmcw) | + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on June 30, 2017._ \ No newline at end of file diff --git a/node_modules/isobject/index.d.ts b/node_modules/isobject/index.d.ts new file mode 100644 index 00000000..55f81c27 --- /dev/null +++ b/node_modules/isobject/index.d.ts @@ -0,0 +1,5 @@ +export = isObject; + +declare function isObject(val: any): boolean; + +declare namespace isObject {} diff --git a/node_modules/isobject/index.js b/node_modules/isobject/index.js new file mode 100644 index 00000000..2d59958b --- /dev/null +++ b/node_modules/isobject/index.js @@ -0,0 +1,12 @@ +/*! + * isobject + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +module.exports = function isObject(val) { + return val != null && typeof val === 'object' && Array.isArray(val) === false; +}; diff --git a/node_modules/isobject/package.json b/node_modules/isobject/package.json new file mode 100644 index 00000000..62aa8c1b --- /dev/null +++ b/node_modules/isobject/package.json @@ -0,0 +1,74 @@ +{ + "name": "isobject", + "description": "Returns true if the value is an object and not an array or null.", + "version": "3.0.1", + "homepage": "https://github.com/jonschlinkert/isobject", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "(https://github.com/LeSuisse)", + "Brian Woodward (https://twitter.com/doowb)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Magnús Dæhlen (https://github.com/magnudae)", + "Tom MacWright (https://macwright.org)" + ], + "repository": "jonschlinkert/isobject", + "bugs": { + "url": "https://github.com/jonschlinkert/isobject/issues" + }, + "license": "MIT", + "files": [ + "index.d.ts", + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": {}, + "devDependencies": { + "gulp-format-md": "^0.1.9", + "mocha": "^2.4.5" + }, + "keywords": [ + "check", + "is", + "is-object", + "isobject", + "kind", + "kind-of", + "kindof", + "native", + "object", + "type", + "typeof", + "value" + ], + "types": "index.d.ts", + "verb": { + "related": { + "list": [ + "extend-shallow", + "is-plain-object", + "kind-of", + "merge-deep" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "reflinks": [ + "verb" + ] + } +} diff --git a/node_modules/kind-of/LICENSE b/node_modules/kind-of/LICENSE new file mode 100644 index 00000000..d734237b --- /dev/null +++ b/node_modules/kind-of/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
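As a quick companion to the isobject files above, here is a small runnable sketch of what the one-line check returns for common values; it restates the README's true/false lists and adds nothing beyond them:

```javascript
var isObject = require('isobject');

// true: plain and prototype-less objects, regexes, class instances
console.log(isObject({}));                   // true
console.log(isObject(Object.create(null)));  // true
console.log(isObject(/foo/));                // true

// false: null, arrays, functions and primitives
console.log(isObject(null));                 // false
console.log(isObject([1, 2, 3]));            // false
console.log(isObject(function () {}));       // false
console.log(isObject('str'));                // false
```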
diff --git a/node_modules/kind-of/README.md b/node_modules/kind-of/README.md new file mode 100644 index 00000000..6a9df36d --- /dev/null +++ b/node_modules/kind-of/README.md @@ -0,0 +1,261 @@ +# kind-of [![NPM version](https://img.shields.io/npm/v/kind-of.svg?style=flat)](https://www.npmjs.com/package/kind-of) [![NPM monthly downloads](https://img.shields.io/npm/dm/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![NPM total downloads](https://img.shields.io/npm/dt/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/kind-of.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/kind-of) + +> Get the native type of a value. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save kind-of +``` + +## Install + +Install with [bower](https://bower.io/) + +```sh +$ bower install kind-of --save +``` + +## Usage + +> es5, browser and es6 ready + +```js +var kindOf = require('kind-of'); + +kindOf(undefined); +//=> 'undefined' + +kindOf(null); +//=> 'null' + +kindOf(true); +//=> 'boolean' + +kindOf(false); +//=> 'boolean' + +kindOf(new Boolean(true)); +//=> 'boolean' + +kindOf(new Buffer('')); +//=> 'buffer' + +kindOf(42); +//=> 'number' + +kindOf(new Number(42)); +//=> 'number' + +kindOf('str'); +//=> 'string' + +kindOf(new String('str')); +//=> 'string' + +kindOf(arguments); +//=> 'arguments' + +kindOf({}); +//=> 'object' + +kindOf(Object.create(null)); +//=> 'object' + +kindOf(new Test()); +//=> 'object' + +kindOf(new Date()); +//=> 'date' + +kindOf([]); +//=> 'array' + +kindOf([1, 2, 3]); +//=> 'array' + +kindOf(new Array()); +//=> 'array' + +kindOf(/foo/); +//=> 'regexp' + +kindOf(new RegExp('foo')); +//=> 'regexp' + +kindOf(function () {}); +//=> 'function' + +kindOf(function * () {}); +//=> 'function' + +kindOf(new Function()); +//=> 'function' + +kindOf(new Map()); +//=> 'map' + +kindOf(new WeakMap()); +//=> 'weakmap' + +kindOf(new Set()); +//=> 'set' + +kindOf(new WeakSet()); +//=> 'weakset' + +kindOf(Symbol('str')); +//=> 'symbol' + +kindOf(new Int8Array()); +//=> 'int8array' + +kindOf(new Uint8Array()); +//=> 'uint8array' + +kindOf(new Uint8ClampedArray()); +//=> 'uint8clampedarray' + +kindOf(new Int16Array()); +//=> 'int16array' + +kindOf(new Uint16Array()); +//=> 'uint16array' + +kindOf(new Int32Array()); +//=> 'int32array' + +kindOf(new Uint32Array()); +//=> 'uint32array' + +kindOf(new Float32Array()); +//=> 'float32array' + +kindOf(new Float64Array()); +//=> 'float64array' +``` + +## Benchmarks + +Benchmarked against [typeof](http://github.com/CodingFu/typeof) and [type-of](https://github.com/ForbesLindesay/type-of). +Note that performaces is slower for es6 features `Map`, `WeakMap`, `Set` and `WeakSet`. 
+ +```bash +#1: array + current x 23,329,397 ops/sec ±0.82% (94 runs sampled) + lib-type-of x 4,170,273 ops/sec ±0.55% (94 runs sampled) + lib-typeof x 9,686,935 ops/sec ±0.59% (98 runs sampled) + +#2: boolean + current x 27,197,115 ops/sec ±0.85% (94 runs sampled) + lib-type-of x 3,145,791 ops/sec ±0.73% (97 runs sampled) + lib-typeof x 9,199,562 ops/sec ±0.44% (99 runs sampled) + +#3: date + current x 20,190,117 ops/sec ±0.86% (92 runs sampled) + lib-type-of x 5,166,970 ops/sec ±0.74% (94 runs sampled) + lib-typeof x 9,610,821 ops/sec ±0.50% (96 runs sampled) + +#4: function + current x 23,855,460 ops/sec ±0.60% (97 runs sampled) + lib-type-of x 5,667,740 ops/sec ±0.54% (100 runs sampled) + lib-typeof x 10,010,644 ops/sec ±0.44% (100 runs sampled) + +#5: null + current x 27,061,047 ops/sec ±0.97% (96 runs sampled) + lib-type-of x 13,965,573 ops/sec ±0.62% (97 runs sampled) + lib-typeof x 8,460,194 ops/sec ±0.61% (97 runs sampled) + +#6: number + current x 25,075,682 ops/sec ±0.53% (99 runs sampled) + lib-type-of x 2,266,405 ops/sec ±0.41% (98 runs sampled) + lib-typeof x 9,821,481 ops/sec ±0.45% (99 runs sampled) + +#7: object + current x 3,348,980 ops/sec ±0.49% (99 runs sampled) + lib-type-of x 3,245,138 ops/sec ±0.60% (94 runs sampled) + lib-typeof x 9,262,952 ops/sec ±0.59% (99 runs sampled) + +#8: regex + current x 21,284,827 ops/sec ±0.72% (96 runs sampled) + lib-type-of x 4,689,241 ops/sec ±0.43% (100 runs sampled) + lib-typeof x 8,957,593 ops/sec ±0.62% (98 runs sampled) + +#9: string + current x 25,379,234 ops/sec ±0.58% (96 runs sampled) + lib-type-of x 3,635,148 ops/sec ±0.76% (93 runs sampled) + lib-typeof x 9,494,134 ops/sec ±0.49% (98 runs sampled) + +#10: undef + current x 27,459,221 ops/sec ±1.01% (93 runs sampled) + lib-type-of x 14,360,433 ops/sec ±0.52% (99 runs sampled) + lib-typeof x 23,202,868 ops/sec ±0.59% (94 runs sampled) + +``` + +## Optimizations + +In 7 out of 8 cases, this library is 2x-10x faster than other top libraries included in the benchmarks. There are a few things that lead to this performance advantage, none of them hard and fast rules, but all of them simple and repeatable in almost any code library: + +1. Optimize around the fastest and most common use cases first. Of course, this will change from project-to-project, but I took some time to understand how and why `typeof` checks were being used in my own libraries and other libraries I use a lot. +2. Optimize around bottlenecks - In other words, the order in which conditionals are implemented is significant, because each check is only as fast as the failing checks that came before it. Here, the biggest bottleneck by far is checking for plain objects (an object that was created by the `Object` constructor). I opted to make this check happen by process of elimination rather than brute force up front (e.g. by using something like `val.constructor.name`), so that every other type check would not be penalized it. +3. Don't do uneccessary processing - why do `.slice(8, -1).toLowerCase();` just to get the word `regex`? It's much faster to do `if (type === '[object RegExp]') return 'regex'` + +## About + +### Related projects + +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. 
This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [is-number](https://www.npmjs.com/package/is-number): Returns true if the value is a number. comprehensive tests. | [homepage](https://github.com/jonschlinkert/is-number "Returns true if the value is a number. comprehensive tests.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 59 | [jonschlinkert](https://github.com/jonschlinkert) | +| 2 | [miguelmota](https://github.com/miguelmota) | +| 1 | [dtothefp](https://github.com/dtothefp) | +| 1 | [ksheedlo](https://github.com/ksheedlo) | +| 1 | [pdehaan](https://github.com/pdehaan) | +| 1 | [laggingreflex](https://github.com/laggingreflex) | + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on May 16, 2017._ \ No newline at end of file diff --git a/node_modules/kind-of/index.js b/node_modules/kind-of/index.js new file mode 100644 index 00000000..b52c2917 --- /dev/null +++ b/node_modules/kind-of/index.js @@ -0,0 +1,116 @@ +var isBuffer = require('is-buffer'); +var toString = Object.prototype.toString; + +/** + * Get the native `typeof` a value. 
+ * + * @param {*} `val` + * @return {*} Native javascript type + */ + +module.exports = function kindOf(val) { + // primitivies + if (typeof val === 'undefined') { + return 'undefined'; + } + if (val === null) { + return 'null'; + } + if (val === true || val === false || val instanceof Boolean) { + return 'boolean'; + } + if (typeof val === 'string' || val instanceof String) { + return 'string'; + } + if (typeof val === 'number' || val instanceof Number) { + return 'number'; + } + + // functions + if (typeof val === 'function' || val instanceof Function) { + return 'function'; + } + + // array + if (typeof Array.isArray !== 'undefined' && Array.isArray(val)) { + return 'array'; + } + + // check for instances of RegExp and Date before calling `toString` + if (val instanceof RegExp) { + return 'regexp'; + } + if (val instanceof Date) { + return 'date'; + } + + // other objects + var type = toString.call(val); + + if (type === '[object RegExp]') { + return 'regexp'; + } + if (type === '[object Date]') { + return 'date'; + } + if (type === '[object Arguments]') { + return 'arguments'; + } + if (type === '[object Error]') { + return 'error'; + } + + // buffer + if (isBuffer(val)) { + return 'buffer'; + } + + // es6: Map, WeakMap, Set, WeakSet + if (type === '[object Set]') { + return 'set'; + } + if (type === '[object WeakSet]') { + return 'weakset'; + } + if (type === '[object Map]') { + return 'map'; + } + if (type === '[object WeakMap]') { + return 'weakmap'; + } + if (type === '[object Symbol]') { + return 'symbol'; + } + + // typed arrays + if (type === '[object Int8Array]') { + return 'int8array'; + } + if (type === '[object Uint8Array]') { + return 'uint8array'; + } + if (type === '[object Uint8ClampedArray]') { + return 'uint8clampedarray'; + } + if (type === '[object Int16Array]') { + return 'int16array'; + } + if (type === '[object Uint16Array]') { + return 'uint16array'; + } + if (type === '[object Int32Array]') { + return 'int32array'; + } + if (type === '[object Uint32Array]') { + return 'uint32array'; + } + if (type === '[object Float32Array]') { + return 'float32array'; + } + if (type === '[object Float64Array]') { + return 'float64array'; + } + + // must be a plain object + return 'object'; +}; diff --git a/node_modules/kind-of/package.json b/node_modules/kind-of/package.json new file mode 100644 index 00000000..5de879e1 --- /dev/null +++ b/node_modules/kind-of/package.json @@ -0,0 +1,90 @@ +{ + "name": "kind-of", + "description": "Get the native type of a value.", + "version": "3.2.2", + "homepage": "https://github.com/jonschlinkert/kind-of", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "David Fox-Powell (https://dtothefp.github.io/me)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Ken Sheedlo (kensheedlo.com)", + "laggingreflex (https://github.com/laggingreflex)", + "Miguel Mota (https://miguelmota.com)", + "Peter deHaan (http://about.me/peterdehaan)" + ], + "repository": "jonschlinkert/kind-of", + "bugs": { + "url": "https://github.com/jonschlinkert/kind-of/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha", + "prepublish": "browserify -o browser.js -e index.js -s index --bare" + }, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "devDependencies": { + "ansi-bold": "^0.1.1", + "benchmarked": "^1.0.0", + "browserify": "^14.3.0", + "glob": "^7.1.1", + "gulp-format-md": "^0.1.12", + "mocha": "^3.3.0", + "type-of": 
"^2.0.1", + "typeof": "^1.0.0" + }, + "keywords": [ + "arguments", + "array", + "boolean", + "check", + "date", + "function", + "is", + "is-type", + "is-type-of", + "kind", + "kind-of", + "number", + "object", + "of", + "regexp", + "string", + "test", + "type", + "type-of", + "typeof", + "types" + ], + "verb": { + "related": { + "list": [ + "is-glob", + "is-number", + "is-primitive" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "reflinks": [ + "verb" + ] + } +} diff --git a/node_modules/latest-version/index.js b/node_modules/latest-version/index.js new file mode 100644 index 00000000..bfb79d1c --- /dev/null +++ b/node_modules/latest-version/index.js @@ -0,0 +1,4 @@ +'use strict'; +const packageJson = require('package-json'); + +module.exports = name => packageJson(name.toLowerCase()).then(data => data.version); diff --git a/node_modules/latest-version/license b/node_modules/latest-version/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/latest-version/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/latest-version/package.json b/node_modules/latest-version/package.json new file mode 100644 index 00000000..f9abf68c --- /dev/null +++ b/node_modules/latest-version/package.json @@ -0,0 +1,39 @@ +{ + "name": "latest-version", + "version": "3.1.0", + "description": "Get the latest version of an npm package", + "license": "MIT", + "repository": "sindresorhus/latest-version", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "latest", + "version", + "npm", + "pkg", + "package", + "package.json", + "current", + "module" + ], + "dependencies": { + "package-json": "^4.0.0" + }, + "devDependencies": { + "ava": "*", + "semver-regex": "^1.0.0", + "xo": "*" + } +} diff --git a/node_modules/latest-version/readme.md b/node_modules/latest-version/readme.md new file mode 100644 index 00000000..3fcffdd0 --- /dev/null +++ b/node_modules/latest-version/readme.md @@ -0,0 +1,40 @@ +# latest-version [![Build Status](https://travis-ci.org/sindresorhus/latest-version.svg?branch=master)](https://travis-ci.org/sindresorhus/latest-version) + +> Get the latest version of an npm package + +Fetches the version directly from the registry instead of depending on the massive [npm](https://github.com/npm/npm/blob/8b5e7b6ae5b4cd2d7d62eaf93b1428638b387072/package.json#L37-L85) module like the [latest](https://github.com/bahamas10/node-latest) module does. + + +## Install + +``` +$ npm install --save latest-version +``` + + +## Usage + +```js +const latestVersion = require('latest-version'); + +latestVersion('ava').then(version => { + console.log(version); + //=> '0.18.0' +}); + +latestVersion('@sindresorhus/df').then(version => { + console.log(version); + //=> '1.0.1' +}); +``` + + +## Related + +- [latest-version-cli](https://github.com/sindresorhus/latest-version-cli) - CLI for this module +- [package-json](https://github.com/sindresorhus/package-json) - Get the package.json of a package from the npm registry + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/lines-and-columns/LICENSE b/node_modules/lines-and-columns/LICENSE new file mode 100644 index 00000000..12978ece --- /dev/null +++ b/node_modules/lines-and-columns/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Brian Donovan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
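Building on the latest-version readme above, a short sketch of handling the rejection you get when the lookup fails (the package name below is purely hypothetical, and the error message shape depends on the underlying package-json module):

```js
const latestVersion = require('latest-version');

latestVersion('some-package-that-probably-does-not-exist')
	.then(version => {
		console.log(version);
	})
	.catch(err => {
		// the underlying package-json lookup rejects when the registry
		// has no such package (or the request itself fails)
		console.error('lookup failed:', err.message);
	});
```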
diff --git a/node_modules/lines-and-columns/README.md b/node_modules/lines-and-columns/README.md new file mode 100644 index 00000000..e8f5dcaa --- /dev/null +++ b/node_modules/lines-and-columns/README.md @@ -0,0 +1,29 @@ +# lines-and-columns + +Maps lines and columns to character offsets and back. This is useful for parsers +and other text processors that deal in character ranges but process text with +meaningful lines and columns. + +## Install + +``` +$ npm install [--save] lines-and-columns +``` + +## Usage + +```js +import LinesAndColumns from 'lines-and-columns'; + +const lines = new LinesAndColumns( +`table { + border: 0 +}`); + +lines.locationForIndex(9); // { line: 1, column: 1 } +lines.indexForLocation({ line: 1, column: 2 }); // 10 +``` + +## License + +MIT diff --git a/node_modules/lines-and-columns/dist/index.d.ts b/node_modules/lines-and-columns/dist/index.d.ts new file mode 100644 index 00000000..93341f54 --- /dev/null +++ b/node_modules/lines-and-columns/dist/index.d.ts @@ -0,0 +1,12 @@ +export declare type SourceLocation = { + line: number; + column: number; +}; +export default class LinesAndColumns { + private string; + private offsets; + constructor(string: string); + locationForIndex(index: number): SourceLocation | null; + indexForLocation(location: SourceLocation): number | null; + private lengthOfLine(line); +} diff --git a/node_modules/lines-and-columns/dist/index.js b/node_modules/lines-and-columns/dist/index.js new file mode 100644 index 00000000..a2469c63 --- /dev/null +++ b/node_modules/lines-and-columns/dist/index.js @@ -0,0 +1,58 @@ +"use strict"; +var LF = '\n'; +var CR = '\r'; +var LinesAndColumns = (function () { + function LinesAndColumns(string) { + this.string = string; + var offsets = [0]; + for (var offset = 0; offset < string.length;) { + switch (string[offset]) { + case LF: + offset += LF.length; + offsets.push(offset); + break; + case CR: + offset += CR.length; + if (string[offset] === LF) { + offset += LF.length; + } + offsets.push(offset); + break; + default: + offset++; + break; + } + } + this.offsets = offsets; + } + LinesAndColumns.prototype.locationForIndex = function (index) { + if (index < 0 || index > this.string.length) { + return null; + } + var line = 0; + var offsets = this.offsets; + while (offsets[line + 1] <= index) { + line++; + } + var column = index - offsets[line]; + return { line: line, column: column }; + }; + LinesAndColumns.prototype.indexForLocation = function (location) { + var line = location.line, column = location.column; + if (line < 0 || line >= this.offsets.length) { + return null; + } + if (column < 0 || column > this.lengthOfLine(line)) { + return null; + } + return this.offsets[line] + column; + }; + LinesAndColumns.prototype.lengthOfLine = function (line) { + var offset = this.offsets[line]; + var nextOffset = line === this.offsets.length - 1 ? 
this.string.length : this.offsets[line + 1]; + return nextOffset - offset; + }; + return LinesAndColumns; +}()); +exports.__esModule = true; +exports["default"] = LinesAndColumns; diff --git a/node_modules/lines-and-columns/dist/index.mjs b/node_modules/lines-and-columns/dist/index.mjs new file mode 100644 index 00000000..e8519eec --- /dev/null +++ b/node_modules/lines-and-columns/dist/index.mjs @@ -0,0 +1,56 @@ +var LF = '\n'; +var CR = '\r'; +var LinesAndColumns = (function () { + function LinesAndColumns(string) { + this.string = string; + var offsets = [0]; + for (var offset = 0; offset < string.length;) { + switch (string[offset]) { + case LF: + offset += LF.length; + offsets.push(offset); + break; + case CR: + offset += CR.length; + if (string[offset] === LF) { + offset += LF.length; + } + offsets.push(offset); + break; + default: + offset++; + break; + } + } + this.offsets = offsets; + } + LinesAndColumns.prototype.locationForIndex = function (index) { + if (index < 0 || index > this.string.length) { + return null; + } + var line = 0; + var offsets = this.offsets; + while (offsets[line + 1] <= index) { + line++; + } + var column = index - offsets[line]; + return { line: line, column: column }; + }; + LinesAndColumns.prototype.indexForLocation = function (location) { + var line = location.line, column = location.column; + if (line < 0 || line >= this.offsets.length) { + return null; + } + if (column < 0 || column > this.lengthOfLine(line)) { + return null; + } + return this.offsets[line] + column; + }; + LinesAndColumns.prototype.lengthOfLine = function (line) { + var offset = this.offsets[line]; + var nextOffset = line === this.offsets.length - 1 ? this.string.length : this.offsets[line + 1]; + return nextOffset - offset; + }; + return LinesAndColumns; +}()); +export default LinesAndColumns; diff --git a/node_modules/lines-and-columns/package.json b/node_modules/lines-and-columns/package.json new file mode 100644 index 00000000..646cb230 --- /dev/null +++ b/node_modules/lines-and-columns/package.json @@ -0,0 +1,45 @@ +{ + "name": "lines-and-columns", + "description": "Maps lines and columns to character offsets and back.", + "main": "dist/index.js", + "module": "dist/index.mjs", + "types": "dist/index.d.ts", + "scripts": { + "lint": "tslint --config tslint.json --project tsconfig.json --type-check", + "lint-fix": "tslint --config tslint.json --project tsconfig.json --type-check --fix", + "prebuild": "rm -rf dist", + "build": "./script/build", + "pretest": "npm run build", + "test": "mocha", + "prepublish": "npm run lint && npm run build", + "semantic-release": "semantic-release pre && npm publish && semantic-release post" + }, + "files": [ + "dist" + ], + "repository": { + "type": "git", + "url": "https://github.com/eventualbuddha/lines-and-columns.git" + }, + "keywords": [ + "lines", + "columns", + "parser" + ], + "author": "Brian Donovan ", + "license": "MIT", + "bugs": { + "url": "https://github.com/eventualbuddha/lines-and-columns/issues" + }, + "homepage": "https://github.com/eventualbuddha/lines-and-columns#readme", + "devDependencies": { + "@types/mocha": "^2.2.34", + "@types/node": "^6.0.52", + "mocha": "^3.2.0", + "semantic-release": "^6.3.2", + "ts-node": "^1.7.2", + "tslint": "^4.1.1", + "typescript": "^2.1.4" + }, + "version": "1.1.6" +} \ No newline at end of file diff --git a/node_modules/lowercase-keys/index.js b/node_modules/lowercase-keys/index.js new file mode 100644 index 00000000..b8d88983 --- /dev/null +++ b/node_modules/lowercase-keys/index.js @@ -0,0 +1,11 @@ 
+'use strict'; +module.exports = function (obj) { + var ret = {}; + var keys = Object.keys(Object(obj)); + + for (var i = 0; i < keys.length; i++) { + ret[keys[i].toLowerCase()] = obj[keys[i]]; + } + + return ret; +}; diff --git a/node_modules/lowercase-keys/license b/node_modules/lowercase-keys/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/lowercase-keys/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/lowercase-keys/package.json b/node_modules/lowercase-keys/package.json new file mode 100644 index 00000000..188af703 --- /dev/null +++ b/node_modules/lowercase-keys/package.json @@ -0,0 +1,35 @@ +{ + "name": "lowercase-keys", + "version": "1.0.1", + "description": "Lowercase the keys of an object", + "license": "MIT", + "repository": "sindresorhus/lowercase-keys", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "object", + "assign", + "extend", + "properties", + "lowercase", + "lower-case", + "case", + "keys", + "key" + ], + "devDependencies": { + "ava": "*" + } +} diff --git a/node_modules/lowercase-keys/readme.md b/node_modules/lowercase-keys/readme.md new file mode 100644 index 00000000..dc65770a --- /dev/null +++ b/node_modules/lowercase-keys/readme.md @@ -0,0 +1,33 @@ +# lowercase-keys [![Build Status](https://travis-ci.org/sindresorhus/lowercase-keys.svg?branch=master)](https://travis-ci.org/sindresorhus/lowercase-keys) + +> Lowercase the keys of an object + + +## Install + +``` +$ npm install --save lowercase-keys +``` + + +## Usage + +```js +var lowercaseKeys = require('lowercase-keys'); + +lowercaseKeys({FOO: true, bAr: false}); +//=> {foo: true, bar: false} +``` + + +## API + +### lowercaseKeys(object) + +Lowercases the keys and returns a new object. + + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/lru-cache/LICENSE b/node_modules/lru-cache/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/lru-cache/README.md b/node_modules/lru-cache/README.md new file mode 100644 index 00000000..d660dd57 --- /dev/null +++ b/node_modules/lru-cache/README.md @@ -0,0 +1,158 @@ +# lru cache + +A cache object that deletes the least-recently-used items. + +[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache) + +## Installation: + +```javascript +npm install lru-cache --save +``` + +## Usage: + +```javascript +var LRU = require("lru-cache") + , options = { max: 500 + , length: function (n, key) { return n * 2 + key.length } + , dispose: function (key, n) { n.close() } + , maxAge: 1000 * 60 * 60 } + , cache = LRU(options) + , otherCache = LRU(50) // sets just the max size + +cache.set("key", "value") +cache.get("key") // "value" + +// non-string keys ARE fully supported +// but note that it must be THE SAME object, not +// just a JSON-equivalent object. +var someObject = { a: 1 } +cache.set(someObject, 'a value') +// Object keys are not toString()-ed +cache.set('[object Object]', 'a different value') +assert.equal(cache.get(someObject), 'a value') +// A similar object with same keys/values won't work, +// because it's a different object identity +assert.equal(cache.get({ a: 1 }), undefined) + +cache.reset() // empty the cache +``` + +If you put more stuff in it, then items will fall out. + +If you try to put an oversized thing in it, then it'll fall out right +away. + +## Options + +* `max` The maximum size of the cache, checked by applying the length + function to all values in the cache. Not setting this is kind of + silly, since that's the whole purpose of this lib, but it defaults + to `Infinity`. +* `maxAge` Maximum age in ms. Items are not pro-actively pruned out + as they age, but if you try to get an item that is too old, it'll + drop it and return undefined instead of giving it to you. +* `length` Function that is used to calculate the length of stored + items. If you're storing strings or buffers, then you probably want + to do something like `function(n, key){return n.length}`. The default is + `function(){return 1}`, which is fine if you want to store `max` + like-sized things. The item is passed as the first argument, and + the key is passed as the second argumnet. +* `dispose` Function that is called on items when they are dropped + from the cache. This can be handy if you want to close file + descriptors or do other cleanup tasks when items are no longer + accessible. Called with `key, value`. 
It's called *before* + actually removing the item from the internal cache, so if you want + to immediately put it back in, you'll have to do that in a + `nextTick` or `setTimeout` callback or it won't do anything. +* `stale` By default, if you set a `maxAge`, it'll only actually pull + stale items out of the cache when you `get(key)`. (That is, it's + not pre-emptively doing a `setTimeout` or anything.) If you set + `stale:true`, it'll return the stale value before deleting it. If + you don't set this, then it'll return `undefined` when you try to + get a stale entry, as if it had already been deleted. +* `noDisposeOnSet` By default, if you set a `dispose()` method, then + it'll be called whenever a `set()` operation overwrites an existing + key. If you set this option, `dispose()` will only be called when a + key falls out of the cache, not when it is overwritten. + +## API + +* `set(key, value, maxAge)` +* `get(key) => value` + + Both of these will update the "recently used"-ness of the key. + They do what you think. `maxAge` is optional and overrides the + cache `maxAge` option if provided. + + If the key is not found, `get()` will return `undefined`. + + The key and val can be any value. + +* `peek(key)` + + Returns the key value (or `undefined` if not found) without + updating the "recently used"-ness of the key. + + (If you find yourself using this a lot, you *might* be using the + wrong sort of data structure, but there are some use cases where + it's handy.) + +* `del(key)` + + Deletes a key out of the cache. + +* `reset()` + + Clear the cache entirely, throwing away all values. + +* `has(key)` + + Check if a key is in the cache, without updating the recent-ness + or deleting it for being stale. + +* `forEach(function(value,key,cache), [thisp])` + + Just like `Array.prototype.forEach`. Iterates over all the keys + in the cache, in order of recent-ness. (Ie, more recently used + items are iterated over first.) + +* `rforEach(function(value,key,cache), [thisp])` + + The same as `cache.forEach(...)` but items are iterated over in + reverse order. (ie, less recently used items are iterated over + first.) + +* `keys()` + + Return an array of the keys in the cache. + +* `values()` + + Return an array of the values in the cache. + +* `length` + + Return total length of objects in cache taking into account + `length` options function. + +* `itemCount` + + Return total quantity of objects currently in cache. Note, that + `stale` (see options) items are returned as part of this item + count. + +* `dump()` + + Return an array of the cache entries ready for serialization and usage + with 'destinationCache.load(arr)`. + +* `load(cacheEntriesArray)` + + Loads another cache entries array, obtained with `sourceCache.dump()`, + into the cache. The destination cache is reset before loading new entries + +* `prune()` + + Manually iterates over the entire cache proactively pruning old entries diff --git a/node_modules/lru-cache/index.js b/node_modules/lru-cache/index.js new file mode 100644 index 00000000..bd35b535 --- /dev/null +++ b/node_modules/lru-cache/index.js @@ -0,0 +1,468 @@ +'use strict' + +module.exports = LRUCache + +// This will be a proper iterable 'Map' in engines that support it, +// or a fakey-fake PseudoMap in older versions. 
+var Map = require('pseudomap') +var util = require('util') + +// A linked list to keep track of recently-used-ness +var Yallist = require('yallist') + +// use symbols if possible, otherwise just _props +var hasSymbol = typeof Symbol === 'function' && process.env._nodeLRUCacheForceNoSymbol !== '1' +var makeSymbol +if (hasSymbol) { + makeSymbol = function (key) { + return Symbol(key) + } +} else { + makeSymbol = function (key) { + return '_' + key + } +} + +var MAX = makeSymbol('max') +var LENGTH = makeSymbol('length') +var LENGTH_CALCULATOR = makeSymbol('lengthCalculator') +var ALLOW_STALE = makeSymbol('allowStale') +var MAX_AGE = makeSymbol('maxAge') +var DISPOSE = makeSymbol('dispose') +var NO_DISPOSE_ON_SET = makeSymbol('noDisposeOnSet') +var LRU_LIST = makeSymbol('lruList') +var CACHE = makeSymbol('cache') + +function naiveLength () { return 1 } + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +function LRUCache (options) { + if (!(this instanceof LRUCache)) { + return new LRUCache(options) + } + + if (typeof options === 'number') { + options = { max: options } + } + + if (!options) { + options = {} + } + + var max = this[MAX] = options.max + // Kind of weird to have a default max of Infinity, but oh well. + if (!max || + !(typeof max === 'number') || + max <= 0) { + this[MAX] = Infinity + } + + var lc = options.length || naiveLength + if (typeof lc !== 'function') { + lc = naiveLength + } + this[LENGTH_CALCULATOR] = lc + + this[ALLOW_STALE] = options.stale || false + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this.reset() +} + +// resize the cache when the max changes. +Object.defineProperty(LRUCache.prototype, 'max', { + set: function (mL) { + if (!mL || !(typeof mL === 'number') || mL <= 0) { + mL = Infinity + } + this[MAX] = mL + trim(this) + }, + get: function () { + return this[MAX] + }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'allowStale', { + set: function (allowStale) { + this[ALLOW_STALE] = !!allowStale + }, + get: function () { + return this[ALLOW_STALE] + }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'maxAge', { + set: function (mA) { + if (!mA || !(typeof mA === 'number') || mA < 0) { + mA = 0 + } + this[MAX_AGE] = mA + trim(this) + }, + get: function () { + return this[MAX_AGE] + }, + enumerable: true +}) + +// resize the cache when the lengthCalculator changes. 
+Object.defineProperty(LRUCache.prototype, 'lengthCalculator', { + set: function (lC) { + if (typeof lC !== 'function') { + lC = naiveLength + } + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(function (hit) { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }, this) + } + trim(this) + }, + get: function () { return this[LENGTH_CALCULATOR] }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'length', { + get: function () { return this[LENGTH] }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'itemCount', { + get: function () { return this[LRU_LIST].length }, + enumerable: true +}) + +LRUCache.prototype.rforEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this[LRU_LIST].tail; walker !== null;) { + var prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } +} + +function forEachStep (self, fn, node, thisp) { + var hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) { + hit = undefined + } + } + if (hit) { + fn.call(thisp, hit.value, hit.key, self) + } +} + +LRUCache.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this[LRU_LIST].head; walker !== null;) { + var next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } +} + +LRUCache.prototype.keys = function () { + return this[LRU_LIST].toArray().map(function (k) { + return k.key + }, this) +} + +LRUCache.prototype.values = function () { + return this[LRU_LIST].toArray().map(function (k) { + return k.value + }, this) +} + +LRUCache.prototype.reset = function () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(function (hit) { + this[DISPOSE](hit.key, hit.value) + }, this) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list +} + +LRUCache.prototype.dump = function () { + return this[LRU_LIST].map(function (hit) { + if (!isStale(this, hit)) { + return { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + } + } + }, this).toArray().filter(function (h) { + return h + }) +} + +LRUCache.prototype.dumpLru = function () { + return this[LRU_LIST] +} + +/* istanbul ignore next */ +LRUCache.prototype.inspect = function (n, opts) { + var str = 'LRUCache {' + var extras = false + + var as = this[ALLOW_STALE] + if (as) { + str += '\n allowStale: true' + extras = true + } + + var max = this[MAX] + if (max && max !== Infinity) { + if (extras) { + str += ',' + } + str += '\n max: ' + util.inspect(max, opts) + extras = true + } + + var maxAge = this[MAX_AGE] + if (maxAge) { + if (extras) { + str += ',' + } + str += '\n maxAge: ' + util.inspect(maxAge, opts) + extras = true + } + + var lc = this[LENGTH_CALCULATOR] + if (lc && lc !== naiveLength) { + if (extras) { + str += ',' + } + str += '\n length: ' + util.inspect(this[LENGTH], opts) + extras = true + } + + var didFirst = false + this[LRU_LIST].forEach(function (item) { + if (didFirst) { + str += ',\n ' + } else { + if (extras) { + str += ',\n' + } + didFirst = true + str += '\n ' + } + var key = util.inspect(item.key).split('\n').join('\n ') + var val = { value: item.value } + if (item.maxAge !== maxAge) { + val.maxAge = item.maxAge + } + if (lc !== naiveLength) { + val.length = item.length + } + if (isStale(this, item)) { + val.stale = 
true + } + + val = util.inspect(val, opts).split('\n').join('\n ') + str += key + ' => ' + val + }) + + if (didFirst || extras) { + str += '\n' + } + str += '}' + + return str +} + +LRUCache.prototype.set = function (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + var now = maxAge ? Date.now() : 0 + var len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + var node = this[CACHE].get(key) + var item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) { + this[DISPOSE](key, item.value) + } + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + var hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) { + this[DISPOSE](key, value) + } + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true +} + +LRUCache.prototype.has = function (key) { + if (!this[CACHE].has(key)) return false + var hit = this[CACHE].get(key).value + if (isStale(this, hit)) { + return false + } + return true +} + +LRUCache.prototype.get = function (key) { + return get(this, key, true) +} + +LRUCache.prototype.peek = function (key) { + return get(this, key, false) +} + +LRUCache.prototype.pop = function () { + var node = this[LRU_LIST].tail + if (!node) return null + del(this, node) + return node.value +} + +LRUCache.prototype.del = function (key) { + del(this, this[CACHE].get(key)) +} + +LRUCache.prototype.load = function (arr) { + // reset the cache + this.reset() + + var now = Date.now() + // A previous serialized cache has the most recent items first + for (var l = arr.length - 1; l >= 0; l--) { + var hit = arr[l] + var expiresAt = hit.e || 0 + if (expiresAt === 0) { + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + } else { + var maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } +} + +LRUCache.prototype.prune = function () { + var self = this + this[CACHE].forEach(function (value, key) { + get(self, key, false) + }) +} + +function get (self, key, doUse) { + var node = self[CACHE].get(key) + if (node) { + var hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) hit = undefined + } else { + if (doUse) { + self[LRU_LIST].unshiftNode(node) + } + } + if (hit) hit = hit.value + } + return hit +} + +function isStale (self, hit) { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) { + return false + } + var stale = false + var diff = Date.now() - hit.now + if (hit.maxAge) { + stale = diff > hit.maxAge + } else { + stale = self[MAX_AGE] && (diff > self[MAX_AGE]) + } + return stale +} + +function trim (self) { + if (self[LENGTH] > self[MAX]) { + for (var walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. 
+ var prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +function del (self, node) { + if (node) { + var hit = node.value + if (self[DISPOSE]) { + self[DISPOSE](hit.key, hit.value) + } + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +// classy, since V8 prefers predictable objects. +function Entry (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 +} diff --git a/node_modules/lru-cache/node_modules/yallist/LICENSE b/node_modules/lru-cache/node_modules/yallist/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/lru-cache/node_modules/yallist/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/lru-cache/node_modules/yallist/README.md b/node_modules/lru-cache/node_modules/yallist/README.md new file mode 100644 index 00000000..f5861018 --- /dev/null +++ b/node_modules/lru-cache/node_modules/yallist/README.md @@ -0,0 +1,204 @@ +# yallist + +Yet Another Linked List + +There are many doubly-linked list implementations like it, but this +one is mine. + +For when an array would be too big, and a Map can't be iterated in +reverse order. + + +[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist) + +## basic usage + +```javascript +var yallist = require('yallist') +var myList = yallist.create([1, 2, 3]) +myList.push('foo') +myList.unshift('bar') +// of course pop() and shift() are there, too +console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo'] +myList.forEach(function (k) { + // walk the list head to tail +}) +myList.forEachReverse(function (k, index, list) { + // walk the list tail to head +}) +var myDoubledList = myList.map(function (k) { + return k + k +}) +// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo'] +// mapReverse is also a thing +var myDoubledListReverse = myList.mapReverse(function (k) { + return k + k +}) // ['foofoo', 6, 4, 2, 'barbar'] + +var reduced = myList.reduce(function (set, entry) { + set += entry + return set +}, 'start') +console.log(reduced) // 'startfoo123bar' +``` + +## api + +The whole API is considered "public". + +Functions with the same name as an Array method work more or less the +same way. + +There's reverse versions of most things because that's the point. + +### Yallist + +Default export, the class that holds and manages a list. + +Call it with either a forEach-able (like an array) or a set of +arguments, to initialize the list. + +The Array-ish methods all act like you'd expect. 
No magic length, +though, so if you change that it won't automatically prune or add +empty spots. + +### Yallist.create(..) + +Alias for Yallist function. Some people like factories. + +#### yallist.head + +The first node in the list + +#### yallist.tail + +The last node in the list + +#### yallist.length + +The number of nodes in the list. (Change this at your peril. It is +not magic like Array length.) + +#### yallist.toArray() + +Convert the list to an array. + +#### yallist.forEach(fn, [thisp]) + +Call a function on each item in the list. + +#### yallist.forEachReverse(fn, [thisp]) + +Call a function on each item in the list, in reverse order. + +#### yallist.get(n) + +Get the data at position `n` in the list. If you use this a lot, +probably better off just using an Array. + +#### yallist.getReverse(n) + +Get the data at position `n`, counting from the tail. + +#### yallist.map(fn, thisp) + +Create a new Yallist with the result of calling the function on each +item. + +#### yallist.mapReverse(fn, thisp) + +Same as `map`, but in reverse. + +#### yallist.pop() + +Get the data from the list tail, and remove the tail from the list. + +#### yallist.push(item, ...) + +Insert one or more items to the tail of the list. + +#### yallist.reduce(fn, initialValue) + +Like Array.reduce. + +#### yallist.reduceReverse + +Like Array.reduce, but in reverse. + +#### yallist.reverse + +Reverse the list in place. + +#### yallist.shift() + +Get the data from the list head, and remove the head from the list. + +#### yallist.slice([from], [to]) + +Just like Array.slice, but returns a new Yallist. + +#### yallist.sliceReverse([from], [to]) + +Just like yallist.slice, but the result is returned in reverse. + +#### yallist.toArray() + +Create an array representation of the list. + +#### yallist.toArrayReverse() + +Create a reversed array representation of the list. + +#### yallist.unshift(item, ...) + +Insert one or more items to the head of the list. + +#### yallist.unshiftNode(node) + +Move a Node object to the front of the list. (That is, pull it out of +wherever it lives, and make it the new head.) + +If the node belongs to a different list, then that list will remove it +first. + +#### yallist.pushNode(node) + +Move a Node object to the end of the list. (That is, pull it out of +wherever it lives, and make it the new tail.) + +If the node belongs to a list already, then that list will remove it +first. + +#### yallist.removeNode(node) + +Remove a node from the list, preserving referential integrity of head +and tail and other nodes. + +Will throw an error if you try to have a list remove a node that +doesn't belong to it. + +### Yallist.Node + +The class that holds the data and is actually the list. + +Call with `var n = new Node(value, previousNode, nextNode)` + +Note that if you do direct operations on Nodes themselves, it's very +easy to get into weird states where the list is broken. Be careful :) + +#### node.next + +The next node in the list. + +#### node.prev + +The previous node in the list. + +#### node.value + +The data the node contains. + +#### node.list + +The list to which this node belongs. (Null if it does not belong to +any list.) 
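A minimal sketch of the node-level API described above, assuming the yallist 2.x build vendored here (the list values are arbitrary):

```js
var Yallist = require('yallist')

var list = Yallist.create(['a', 'b', 'c'])

// Move an existing Node to the head without allocating a new one.
list.unshiftNode(list.tail)
console.log(list.toArray()) // ['c', 'a', 'b']

// Detach a node entirely; head/tail/neighbour links are repaired.
list.removeNode(list.head.next)
console.log(list.toArray()) // ['c', 'b']
```

Because each node carries its own `prev`/`next`/`list` pointers, these moves are O(1) and never copy values, which is what lru-cache relies on above when it calls `unshiftNode` to record recency.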
diff --git a/node_modules/lru-cache/node_modules/yallist/iterator.js b/node_modules/lru-cache/node_modules/yallist/iterator.js new file mode 100644 index 00000000..4a15bf22 --- /dev/null +++ b/node_modules/lru-cache/node_modules/yallist/iterator.js @@ -0,0 +1,7 @@ +var Yallist = require('./yallist.js') + +Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } +} diff --git a/node_modules/lru-cache/node_modules/yallist/package.json b/node_modules/lru-cache/node_modules/yallist/package.json new file mode 100644 index 00000000..17a13d1c --- /dev/null +++ b/node_modules/lru-cache/node_modules/yallist/package.json @@ -0,0 +1,29 @@ +{ + "name": "yallist", + "version": "2.1.2", + "description": "Yet Another Linked List", + "main": "yallist.js", + "directories": { + "test": "test" + }, + "files": [ + "yallist.js", + "iterator.js" + ], + "dependencies": {}, + "devDependencies": { + "tap": "^10.3.0" + }, + "scripts": { + "test": "tap test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/yallist.git" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/lru-cache/node_modules/yallist/yallist.js b/node_modules/lru-cache/node_modules/yallist/yallist.js new file mode 100644 index 00000000..518d2333 --- /dev/null +++ b/node_modules/lru-cache/node_modules/yallist/yallist.js @@ -0,0 +1,370 @@ +module.exports = Yallist + +Yallist.Node = Node +Yallist.create = Yallist + +function Yallist (list) { + var self = this + if (!(self instanceof Yallist)) { + self = new Yallist() + } + + self.tail = null + self.head = null + self.length = 0 + + if (list && typeof list.forEach === 'function') { + list.forEach(function (item) { + self.push(item) + }) + } else if (arguments.length > 0) { + for (var i = 0, l = arguments.length; i < l; i++) { + self.push(arguments[i]) + } + } + + return self +} + +Yallist.prototype.removeNode = function (node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list') + } + + var next = node.next + var prev = node.prev + + if (next) { + next.prev = prev + } + + if (prev) { + prev.next = next + } + + if (node === this.head) { + this.head = next + } + if (node === this.tail) { + this.tail = prev + } + + node.list.length-- + node.next = null + node.prev = null + node.list = null +} + +Yallist.prototype.unshiftNode = function (node) { + if (node === this.head) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var head = this.head + node.list = this + node.next = head + if (head) { + head.prev = node + } + + this.head = node + if (!this.tail) { + this.tail = node + } + this.length++ +} + +Yallist.prototype.pushNode = function (node) { + if (node === this.tail) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var tail = this.tail + node.list = this + node.prev = tail + if (tail) { + tail.next = node + } + + this.tail = node + if (!this.head) { + this.head = node + } + this.length++ +} + +Yallist.prototype.push = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + push(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.unshift = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + unshift(this, arguments[i]) + } + return this.length +} + 
+Yallist.prototype.pop = function () { + if (!this.tail) { + return undefined + } + + var res = this.tail.value + this.tail = this.tail.prev + if (this.tail) { + this.tail.next = null + } else { + this.head = null + } + this.length-- + return res +} + +Yallist.prototype.shift = function () { + if (!this.head) { + return undefined + } + + var res = this.head.value + this.head = this.head.next + if (this.head) { + this.head.prev = null + } else { + this.tail = null + } + this.length-- + return res +} + +Yallist.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.head, i = 0; walker !== null; i++) { + fn.call(thisp, walker.value, i, this) + walker = walker.next + } +} + +Yallist.prototype.forEachReverse = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { + fn.call(thisp, walker.value, i, this) + walker = walker.prev + } +} + +Yallist.prototype.get = function (n) { + for (var i = 0, walker = this.head; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.next + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.getReverse = function (n) { + for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.prev + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.map = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.head; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.next + } + return res +} + +Yallist.prototype.mapReverse = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.tail; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.prev + } + return res +} + +Yallist.prototype.reduce = function (fn, initial) { + var acc + var walker = this.head + if (arguments.length > 1) { + acc = initial + } else if (this.head) { + walker = this.head.next + acc = this.head.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = 0; walker !== null; i++) { + acc = fn(acc, walker.value, i) + walker = walker.next + } + + return acc +} + +Yallist.prototype.reduceReverse = function (fn, initial) { + var acc + var walker = this.tail + if (arguments.length > 1) { + acc = initial + } else if (this.tail) { + walker = this.tail.prev + acc = this.tail.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = this.length - 1; walker !== null; i--) { + acc = fn(acc, walker.value, i) + walker = walker.prev + } + + return acc +} + +Yallist.prototype.toArray = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.head; walker !== null; i++) { + arr[i] = walker.value + walker = walker.next + } + return arr +} + +Yallist.prototype.toArrayReverse = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.tail; walker !== null; i++) { + arr[i] = walker.value + walker = walker.prev + } + return arr +} + +Yallist.prototype.slice = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = 
this.length + } + for (var i = 0, walker = this.head; walker !== null && i < from; i++) { + walker = walker.next + } + for (; walker !== null && i < to; i++, walker = walker.next) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.sliceReverse = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { + walker = walker.prev + } + for (; walker !== null && i > from; i--, walker = walker.prev) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.reverse = function () { + var head = this.head + var tail = this.tail + for (var walker = head; walker !== null; walker = walker.prev) { + var p = walker.prev + walker.prev = walker.next + walker.next = p + } + this.head = tail + this.tail = head + return this +} + +function push (self, item) { + self.tail = new Node(item, self.tail, null, self) + if (!self.head) { + self.head = self.tail + } + self.length++ +} + +function unshift (self, item) { + self.head = new Node(item, null, self.head, self) + if (!self.tail) { + self.tail = self.head + } + self.length++ +} + +function Node (value, prev, next, list) { + if (!(this instanceof Node)) { + return new Node(value, prev, next, list) + } + + this.list = list + this.value = value + + if (prev) { + prev.next = this + this.prev = prev + } else { + this.prev = null + } + + if (next) { + next.prev = this + this.next = next + } else { + this.next = null + } +} diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json new file mode 100644 index 00000000..bf7a06dc --- /dev/null +++ b/node_modules/lru-cache/package.json @@ -0,0 +1,36 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "4.1.5", + "author": "Isaac Z. 
Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "scripts": { + "test": "tap test/*.js --100 -J", + "snap": "TAP_SNAPSHOT=1 tap test/*.js -J", + "posttest": "standard test/*.js index.js", + "coveragerport": "tap --coverage-report=html", + "lintfix": "standard --fix test/*.js index.js", + "preversion": "npm test", + "postversion": "npm publish --tag=legacy", + "postpublish": "git push origin --all; git push origin --tags" + }, + "main": "index.js", + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "benchmark": "^2.1.4", + "standard": "^12.0.1", + "tap": "^12.1.0" + }, + "license": "ISC", + "dependencies": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + }, + "files": [ + "index.js" + ] +} diff --git a/node_modules/make-dir/index.js b/node_modules/make-dir/index.js new file mode 100644 index 00000000..18439555 --- /dev/null +++ b/node_modules/make-dir/index.js @@ -0,0 +1,85 @@ +'use strict'; +const fs = require('fs'); +const path = require('path'); +const pify = require('pify'); + +const defaults = { + mode: 0o777 & (~process.umask()), + fs +}; + +// https://github.com/nodejs/node/issues/8987 +// https://github.com/libuv/libuv/pull/1088 +const checkPath = pth => { + if (process.platform === 'win32') { + const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')); + + if (pathHasInvalidWinCharacters) { + const err = new Error(`Path contains invalid characters: ${pth}`); + err.code = 'EINVAL'; + throw err; + } + } +}; + +module.exports = (input, opts) => Promise.resolve().then(() => { + checkPath(input); + opts = Object.assign({}, defaults, opts); + + const mkdir = pify(opts.fs.mkdir); + const stat = pify(opts.fs.stat); + + const make = pth => { + return mkdir(pth, opts.mode) + .then(() => pth) + .catch(err => { + if (err.code === 'ENOENT') { + if (err.message.includes('null bytes') || path.dirname(pth) === pth) { + throw err; + } + + return make(path.dirname(pth)).then(() => make(pth)); + } + + return stat(pth) + .then(stats => stats.isDirectory() ? 
pth : Promise.reject()) + .catch(() => { + throw err; + }); + }); + }; + + return make(path.resolve(input)); +}); + +module.exports.sync = (input, opts) => { + checkPath(input); + opts = Object.assign({}, defaults, opts); + + const make = pth => { + try { + opts.fs.mkdirSync(pth, opts.mode); + } catch (err) { + if (err.code === 'ENOENT') { + if (err.message.includes('null bytes') || path.dirname(pth) === pth) { + throw err; + } + + make(path.dirname(pth)); + return make(pth); + } + + try { + if (!opts.fs.statSync(pth).isDirectory()) { + throw new Error('The path is not a directory'); + } + } catch (_) { + throw err; + } + } + + return pth; + }; + + return make(path.resolve(input)); +}; diff --git a/node_modules/make-dir/license b/node_modules/make-dir/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/make-dir/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
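The implementation above retries `mkdir` one directory at a time, walking up the tree on `ENOENT` and treating an already-existing directory as success. A short usage sketch, assuming the API as vendored (the paths are arbitrary, and `graceful-fs` is just one possible `fs` substitute):

```js
const makeDir = require('make-dir');
const gracefulFs = require('graceful-fs');

// Promise API: resolves with the absolute path of the created directory.
makeDir('tmp/example/deeply/nested', {mode: 0o755})
  .then(created => console.log('created', created))
  .catch(err => console.error(err));

// Sync API, with a swapped-in fs implementation.
const created = makeDir.sync('tmp/example/other', {fs: gracefulFs});
console.log(created);
```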
diff --git a/node_modules/make-dir/package.json b/node_modules/make-dir/package.json new file mode 100644 index 00000000..ec907a71 --- /dev/null +++ b/node_modules/make-dir/package.json @@ -0,0 +1,54 @@ +{ + "name": "make-dir", + "version": "1.3.0", + "description": "Make a directory and its parents if needed - Think `mkdir -p`", + "license": "MIT", + "repository": "sindresorhus/make-dir", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && nyc ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "mkdir", + "mkdirp", + "make", + "directories", + "dir", + "dirs", + "folders", + "directory", + "folder", + "path", + "parent", + "parents", + "intermediate", + "recursively", + "recursive", + "create", + "fs", + "filesystem", + "file-system" + ], + "dependencies": { + "pify": "^3.0.0" + }, + "devDependencies": { + "ava": "*", + "codecov": "^3.0.0", + "graceful-fs": "^4.1.11", + "nyc": "^11.3.0", + "path-type": "^3.0.0", + "tempy": "^0.2.1", + "xo": "^0.20.0" + } +} diff --git a/node_modules/make-dir/readme.md b/node_modules/make-dir/readme.md new file mode 100644 index 00000000..8a32bf47 --- /dev/null +++ b/node_modules/make-dir/readme.md @@ -0,0 +1,116 @@ +# make-dir [![Build Status: macOS & Linux](https://travis-ci.org/sindresorhus/make-dir.svg?branch=master)](https://travis-ci.org/sindresorhus/make-dir) [![Build status: Windows](https://ci.appveyor.com/api/projects/status/e0vtt8y600w91gcs/branch/master?svg=true)](https://ci.appveyor.com/project/sindresorhus/make-dir/branch/master) [![codecov](https://codecov.io/gh/sindresorhus/make-dir/branch/master/graph/badge.svg)](https://codecov.io/gh/sindresorhus/make-dir) + +> Make a directory and its parents if needed - Think `mkdir -p` + + +## Advantages over [`mkdirp`](https://github.com/substack/node-mkdirp) + +- Promise API *(Async/await ready!)* +- Fixes many `mkdirp` issues: [#96](https://github.com/substack/node-mkdirp/pull/96) [#70](https://github.com/substack/node-mkdirp/issues/70) [#66](https://github.com/substack/node-mkdirp/issues/66) +- 100% test coverage +- CI-tested on macOS, Linux, and Windows +- Actively maintained +- Doesn't bundle a CLI + + +## Install + +``` +$ npm install make-dir +``` + + +## Usage + +``` +$ pwd +/Users/sindresorhus/fun +$ tree +. +``` + +```js +const makeDir = require('make-dir'); + +makeDir('unicorn/rainbow/cake').then(path => { + console.log(path); + //=> '/Users/sindresorhus/fun/unicorn/rainbow/cake' +}); +``` + +``` +$ tree +. +└── unicorn + └── rainbow + └── cake +``` + +Multiple directories: + +```js +const makeDir = require('make-dir'); + +Promise.all([ + makeDir('unicorn/rainbow') + makeDir('foo/bar') +]).then(paths => { + console.log(paths); + /* + [ + '/Users/sindresorhus/fun/unicorn/rainbow', + '/Users/sindresorhus/fun/foo/bar' + ] + */ +}); +``` + + +## API + +### makeDir(path, [options]) + +Returns a `Promise` for the path to the created directory. + +### makeDir.sync(path, [options]) + +Returns the path to the created directory. + +#### path + +Type: `string` + +Directory to create. + +#### options + +Type: `Object` + +##### mode + +Type: `integer`
+Default: `0o777 & (~process.umask())` + +Directory [permissions](https://x-team.com/blog/file-system-permissions-umask-node-js/). + +##### fs + +Type: `Object`
+Default: `require('fs')` + +Use a custom `fs` implementation. For example [`graceful-fs`](https://github.com/isaacs/node-graceful-fs). + + +## Related + +- [make-dir-cli](https://github.com/sindresorhus/make-dir-cli) - CLI for this module +- [del](https://github.com/sindresorhus/del) - Delete files and directories +- [globby](https://github.com/sindresorhus/globby) - User-friendly glob matching +- [cpy](https://github.com/sindresorhus/cpy) - Copy files +- [cpy-cli](https://github.com/sindresorhus/cpy-cli) - Copy files on the command-line +- [move-file](https://github.com/sindresorhus/move-file) - Move a file + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/map-cache/LICENSE b/node_modules/map-cache/LICENSE new file mode 100644 index 00000000..1e49edf8 --- /dev/null +++ b/node_modules/map-cache/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/map-cache/README.md b/node_modules/map-cache/README.md new file mode 100644 index 00000000..6260b29f --- /dev/null +++ b/node_modules/map-cache/README.md @@ -0,0 +1,145 @@ +# map-cache [![NPM version](https://img.shields.io/npm/v/map-cache.svg?style=flat)](https://www.npmjs.com/package/map-cache) [![NPM downloads](https://img.shields.io/npm/dm/map-cache.svg?style=flat)](https://npmjs.org/package/map-cache) [![Build Status](https://img.shields.io/travis/jonschlinkert/map-cache.svg?style=flat)](https://travis-ci.org/jonschlinkert/map-cache) + +Basic cache object for storing key-value pairs. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install map-cache --save +``` + +Based on MapCache in Lo-dash v3.0. [MIT License](https://github.com/lodash/lodash/blob/master/LICENSE.txt) + +## Usage + +```js +var MapCache = require('map-cache'); +var mapCache = new MapCache(); +``` + +## API + +### [MapCache](index.js#L28) + +Creates a cache object to store key/value pairs. + +**Example** + +```js +var cache = new MapCache(); +``` + +### [.set](index.js#L45) + +Adds `value` to `key` on the cache. + +**Params** + +* `key` **{String}**: The key of the value to cache. +* `value` **{any}**: The value to cache. +* `returns` **{Object}**: Returns the `Cache` object for chaining. + +**Example** + +```js +cache.set('foo', 'bar'); +``` + +### [.get](index.js#L65) + +Gets the cached value for `key`. + +**Params** + +* `key` **{String}**: The key of the value to get. +* `returns` **{any}**: Returns the cached value. 
+ +**Example** + +```js +cache.get('foo'); +//=> 'bar' +``` + +### [.has](index.js#L82) + +Checks if a cached value for `key` exists. + +**Params** + +* `key` **{String}**: The key of the entry to check. +* `returns` **{Boolean}**: Returns `true` if an entry for `key` exists, else `false`. + +**Example** + +```js +cache.has('foo'); +//=> true +``` + +### [.del](index.js#L98) + +Removes `key` and its value from the cache. + +**Params** + +* `key` **{String}**: The key of the value to remove. +* `returns` **{Boolean}**: Returns `true` if the entry was removed successfully, else `false`. + +**Example** + +```js +cache.del('foo'); +``` + +## Related projects + +You might also be interested in these projects: + +* [cache-base](https://www.npmjs.com/package/cache-base): Basic object cache with `get`, `set`, `del`, and `has` methods for node.js/javascript projects. | [homepage](https://github.com/jonschlinkert/cache-base) +* [config-cache](https://www.npmjs.com/package/config-cache): General purpose JavaScript object storage methods. | [homepage](https://github.com/jonschlinkert/config-cache) +* [option-cache](https://www.npmjs.com/package/option-cache): Simple API for managing options in JavaScript applications. | [homepage](https://github.com/jonschlinkert/option-cache) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/map-cache/issues/new). + +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/map-cache/blob/master/LICENSE). + +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v0.9.0, on May 10, 2016._ \ No newline at end of file diff --git a/node_modules/map-cache/index.js b/node_modules/map-cache/index.js new file mode 100644 index 00000000..f86842f2 --- /dev/null +++ b/node_modules/map-cache/index.js @@ -0,0 +1,100 @@ +/*! + * map-cache + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var hasOwn = Object.prototype.hasOwnProperty; + +/** + * Expose `MapCache` + */ + +module.exports = MapCache; + +/** + * Creates a cache object to store key/value pairs. + * + * ```js + * var cache = new MapCache(); + * ``` + * + * @api public + */ + +function MapCache(data) { + this.__data__ = data || {}; +} + +/** + * Adds `value` to `key` on the cache. + * + * ```js + * cache.set('foo', 'bar'); + * ``` + * + * @param {String} `key` The key of the value to cache. + * @param {*} `value` The value to cache. + * @returns {Object} Returns the `Cache` object for chaining. + * @api public + */ + +MapCache.prototype.set = function mapSet(key, value) { + if (key !== '__proto__') { + this.__data__[key] = value; + } + return this; +}; + +/** + * Gets the cached value for `key`. + * + * ```js + * cache.get('foo'); + * //=> 'bar' + * ``` + * + * @param {String} `key` The key of the value to get. + * @returns {*} Returns the cached value. 
+ * @api public + */ + +MapCache.prototype.get = function mapGet(key) { + return key === '__proto__' ? undefined : this.__data__[key]; +}; + +/** + * Checks if a cached value for `key` exists. + * + * ```js + * cache.has('foo'); + * //=> true + * ``` + * + * @param {String} `key` The key of the entry to check. + * @returns {Boolean} Returns `true` if an entry for `key` exists, else `false`. + * @api public + */ + +MapCache.prototype.has = function mapHas(key) { + return key !== '__proto__' && hasOwn.call(this.__data__, key); +}; + +/** + * Removes `key` and its value from the cache. + * + * ```js + * cache.del('foo'); + * ``` + * @title .del + * @param {String} `key` The key of the value to remove. + * @returns {Boolean} Returns `true` if the entry was removed successfully, else `false`. + * @api public + */ + +MapCache.prototype.del = function mapDelete(key) { + return this.has(key) && delete this.__data__[key]; +}; diff --git a/node_modules/map-cache/package.json b/node_modules/map-cache/package.json new file mode 100644 index 00000000..8bf0af2c --- /dev/null +++ b/node_modules/map-cache/package.json @@ -0,0 +1,59 @@ +{ + "name": "map-cache", + "description": "Basic cache object for storing key-value pairs.", + "version": "0.2.2", + "homepage": "https://github.com/jonschlinkert/map-cache", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/map-cache", + "bugs": { + "url": "https://github.com/jonschlinkert/map-cache/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^0.1.9", + "should": "^8.3.1" + }, + "keywords": [ + "cache", + "get", + "has", + "object", + "set", + "storage", + "store" + ], + "verb": { + "run": true, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "config-cache", + "option-cache", + "cache-base" + ] + }, + "reflinks": [ + "verb" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/map-visit/LICENSE b/node_modules/map-visit/LICENSE new file mode 100644 index 00000000..83b56e70 --- /dev/null +++ b/node_modules/map-visit/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
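To make the guarded map-cache implementation above concrete, a small sketch assuming the map-cache 0.2.2 build vendored here (keys and values are arbitrary):

```js
var MapCache = require('map-cache');
var cache = new MapCache();

cache.set('user', {name: 'ada'}).set('count', 1); // set() returns the cache, so calls chain
console.log(cache.get('user'));  //=> { name: 'ada' }
console.log(cache.has('count')); //=> true
console.log(cache.del('count')); //=> true

// '__proto__' is deliberately ignored, so the backing object cannot be poisoned.
cache.set('__proto__', 'polluted');
console.log(cache.get('__proto__')); //=> undefined
```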
diff --git a/node_modules/map-visit/README.md b/node_modules/map-visit/README.md new file mode 100644 index 00000000..5ab02d45 --- /dev/null +++ b/node_modules/map-visit/README.md @@ -0,0 +1,155 @@ +# map-visit [![NPM version](https://img.shields.io/npm/v/map-visit.svg?style=flat)](https://www.npmjs.com/package/map-visit) [![NPM monthly downloads](https://img.shields.io/npm/dm/map-visit.svg?style=flat)](https://npmjs.org/package/map-visit) [![NPM total downloads](https://img.shields.io/npm/dt/map-visit.svg?style=flat)](https://npmjs.org/package/map-visit) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/map-visit.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/map-visit) + +> Map `visit` over an array of objects. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save map-visit +``` + +## Usage + +```js +var mapVisit = require('map-visit'); +``` + +## What does this do? + +**Assign/Merge/Extend vs. Visit** + +Let's say you want to add a `set` method to your application that will: + +* set key-value pairs on a `data` object +* extend objects onto the `data` object +* extend arrays of objects onto the data object + +**Example using `extend`** + +Here is one way to accomplish this using Lo-Dash's `extend` (comparable to `Object.assign`): + +```js +var _ = require('lodash'); + +var obj = { + data: {}, + set: function (key, value) { + if (Array.isArray(key)) { + _.extend.apply(_, [obj.data].concat(key)); + } else if (typeof key === 'object') { + _.extend(obj.data, key); + } else { + obj.data[key] = value; + } + } +}; + +obj.set('a', 'a'); +obj.set([{b: 'b'}, {c: 'c'}]); +obj.set({d: {e: 'f'}}); + +console.log(obj.data); +//=> {a: 'a', b: 'b', c: 'c', d: { e: 'f' }} +``` + +The above approach works fine for most use cases. However, **if you also want to emit an event** each time a property is added to the `data` object, or you want more control over what happens as the object is extended, a better approach would be to use `visit`. + +**Example using `visit`** + +In this approach: + +* when an array is passed to `set`, the `mapVisit` library calls the `set` method on each object in the array. +* when an object is passed, `visit` calls `set` on each property in the object. + +As a result, the `data` event will be emitted every time a property is added to `data` (events are just an example, you can use this approach to perform any necessary logic every time the method is called). 
+ +```js +var mapVisit = require('map-visit'); +var visit = require('object-visit'); + +var obj = { + data: {}, + set: function (key, value) { + if (Array.isArray(key)) { + mapVisit(obj, 'set', key); + } else if (typeof key === 'object') { + visit(obj, 'set', key); + } else { + // simulate an event-emitter + console.log('emit', key, value); + obj.data[key] = value; + } + } +}; + +obj.set('a', 'a'); +obj.set([{b: 'b'}, {c: 'c'}]); +obj.set({d: {e: 'f'}}); +obj.set({g: 'h', i: 'j', k: 'l'}); + +console.log(obj.data); +//=> {a: 'a', b: 'b', c: 'c', d: { e: 'f' }, g: 'h', i: 'j', k: 'l'} + +// events would look something like: +// emit a a +// emit b b +// emit c c +// emit d { e: 'f' } +// emit g h +// emit i j +// emit k l +``` + +## About + +### Related projects + +* [collection-visit](https://www.npmjs.com/package/collection-visit): Visit a method over the items in an object, or map visit over the objects… [more](https://github.com/jonschlinkert/collection-visit) | [homepage](https://github.com/jonschlinkert/collection-visit "Visit a method over the items in an object, or map visit over the objects in an array.") +* [object-visit](https://www.npmjs.com/package/object-visit): Call a specified method on each value in the given object. | [homepage](https://github.com/jonschlinkert/object-visit "Call a specified method on each value in the given object.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 15 | [jonschlinkert](https://github.com/jonschlinkert) | +| 7 | [doowb](https://github.com/doowb) | + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.5.0, on April 09, 2017._ \ No newline at end of file diff --git a/node_modules/map-visit/index.js b/node_modules/map-visit/index.js new file mode 100644 index 00000000..bc54ccc4 --- /dev/null +++ b/node_modules/map-visit/index.js @@ -0,0 +1,37 @@ +'use strict'; + +var util = require('util'); +var visit = require('object-visit'); + +/** + * Map `visit` over an array of objects. + * + * @param {Object} `collection` The context in which to invoke `method` + * @param {String} `method` Name of the method to call on `collection` + * @param {Object} `arr` Array of objects. 
+ */ + +module.exports = function mapVisit(collection, method, val) { + if (isObject(val)) { + return visit.apply(null, arguments); + } + + if (!Array.isArray(val)) { + throw new TypeError('expected an array: ' + util.inspect(val)); + } + + var args = [].slice.call(arguments, 3); + + for (var i = 0; i < val.length; i++) { + var ele = val[i]; + if (isObject(ele)) { + visit.apply(null, [collection, method, ele].concat(args)); + } else { + collection[method].apply(collection, [ele].concat(args)); + } + } +}; + +function isObject(val) { + return val && (typeof val === 'function' || (!Array.isArray(val) && typeof val === 'object')); +} diff --git a/node_modules/map-visit/package.json b/node_modules/map-visit/package.json new file mode 100644 index 00000000..e8d0f41c --- /dev/null +++ b/node_modules/map-visit/package.json @@ -0,0 +1,74 @@ +{ + "name": "map-visit", + "description": "Map `visit` over an array of objects.", + "version": "1.0.0", + "homepage": "https://github.com/jonschlinkert/map-visit", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "jonschlinkert/map-visit", + "bugs": { + "url": "https://github.com/jonschlinkert/map-visit/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "object-visit": "^1.0.0" + }, + "devDependencies": { + "clone-deep": "^0.2.4", + "extend-shallow": "^2.0.1", + "gulp-format-md": "^0.1.12", + "lodash": "^4.17.4", + "mocha": "^3.2.0" + }, + "keywords": [ + "array", + "arrays", + "function", + "helper", + "invoke", + "key", + "map", + "method", + "object", + "objects", + "value", + "visit", + "visitor" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "collection-visit", + "object-visit" + ] + }, + "reflinks": [ + "verb", + "verb-generate-readme" + ] + } +} diff --git a/node_modules/media-typer/HISTORY.md b/node_modules/media-typer/HISTORY.md new file mode 100644 index 00000000..62c20031 --- /dev/null +++ b/node_modules/media-typer/HISTORY.md @@ -0,0 +1,22 @@ +0.3.0 / 2014-09-07 +================== + + * Support Node.js 0.6 + * Throw error when parameter format invalid on parse + +0.2.0 / 2014-06-18 +================== + + * Add `typer.format()` to format media types + +0.1.0 / 2014-06-17 +================== + + * Accept `req` as argument to `parse` + * Accept `res` as argument to `parse` + * Parse media type with extra LWS between type and first parameter + +0.0.0 / 2014-06-13 +================== + + * Initial implementation diff --git a/node_modules/media-typer/LICENSE b/node_modules/media-typer/LICENSE new file mode 100644 index 00000000..b7dce6cf --- /dev/null +++ b/node_modules/media-typer/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above 
copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/media-typer/README.md b/node_modules/media-typer/README.md new file mode 100644 index 00000000..d8df6234 --- /dev/null +++ b/node_modules/media-typer/README.md @@ -0,0 +1,81 @@ +# media-typer + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Simple RFC 6838 media type parser + +## Installation + +```sh +$ npm install media-typer +``` + +## API + +```js +var typer = require('media-typer') +``` + +### typer.parse(string) + +```js +var obj = typer.parse('image/svg+xml; charset=utf-8') +``` + +Parse a media type string. This will return an object with the following +properties (examples are shown for the string `'image/svg+xml; charset=utf-8'`): + + - `type`: The type of the media type (always lower case). Example: `'image'` + + - `subtype`: The subtype of the media type (always lower case). Example: `'svg'` + + - `suffix`: The suffix of the media type (always lower case). Example: `'xml'` + + - `parameters`: An object of the parameters in the media type (name of parameter always lower case). Example: `{charset: 'utf-8'}` + +### typer.parse(req) + +```js +var obj = typer.parse(req) +``` + +Parse the `content-type` header from the given `req`. Short-cut for +`typer.parse(req.headers['content-type'])`. + +### typer.parse(res) + +```js +var obj = typer.parse(res) +``` + +Parse the `content-type` header set on the given `res`. Short-cut for +`typer.parse(res.getHeader('content-type'))`. + +### typer.format(obj) + +```js +var obj = typer.format({type: 'image', subtype: 'svg', suffix: 'xml'}) +``` + +Format an object into a media type string. This will return a string of the +mime type for the given object. For the properties of the object, see the +documentation for `typer.parse(string)`. + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/media-typer.svg?style=flat +[npm-url]: https://npmjs.org/package/media-typer +[node-version-image]: https://img.shields.io/badge/node.js-%3E%3D_0.6-brightgreen.svg?style=flat +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/jshttp/media-typer.svg?style=flat +[travis-url]: https://travis-ci.org/jshttp/media-typer +[coveralls-image]: https://img.shields.io/coveralls/jshttp/media-typer.svg?style=flat +[coveralls-url]: https://coveralls.io/r/jshttp/media-typer +[downloads-image]: https://img.shields.io/npm/dm/media-typer.svg?style=flat +[downloads-url]: https://npmjs.org/package/media-typer diff --git a/node_modules/media-typer/index.js b/node_modules/media-typer/index.js new file mode 100644 index 00000000..07f7295e --- /dev/null +++ b/node_modules/media-typer/index.js @@ -0,0 +1,270 @@ +/*! 
+ * media-typer + * Copyright(c) 2014 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * RegExp to match *( ";" parameter ) in RFC 2616 sec 3.7 + * + * parameter = token "=" ( token | quoted-string ) + * token = 1* + * separators = "(" | ")" | "<" | ">" | "@" + * | "," | ";" | ":" | "\" | <"> + * | "/" | "[" | "]" | "?" | "=" + * | "{" | "}" | SP | HT + * quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) + * qdtext = > + * quoted-pair = "\" CHAR + * CHAR = + * TEXT = + * LWS = [CRLF] 1*( SP | HT ) + * CRLF = CR LF + * CR = + * LF = + * SP = + * SHT = + * CTL = + * OCTET = + */ +var paramRegExp = /; *([!#$%&'\*\+\-\.0-9A-Z\^_`a-z\|~]+) *= *("(?:[ !\u0023-\u005b\u005d-\u007e\u0080-\u00ff]|\\[\u0020-\u007e])*"|[!#$%&'\*\+\-\.0-9A-Z\^_`a-z\|~]+) */g; +var textRegExp = /^[\u0020-\u007e\u0080-\u00ff]+$/ +var tokenRegExp = /^[!#$%&'\*\+\-\.0-9A-Z\^_`a-z\|~]+$/ + +/** + * RegExp to match quoted-pair in RFC 2616 + * + * quoted-pair = "\" CHAR + * CHAR = + */ +var qescRegExp = /\\([\u0000-\u007f])/g; + +/** + * RegExp to match chars that must be quoted-pair in RFC 2616 + */ +var quoteRegExp = /([\\"])/g; + +/** + * RegExp to match type in RFC 6838 + * + * type-name = restricted-name + * subtype-name = restricted-name + * restricted-name = restricted-name-first *126restricted-name-chars + * restricted-name-first = ALPHA / DIGIT + * restricted-name-chars = ALPHA / DIGIT / "!" / "#" / + * "$" / "&" / "-" / "^" / "_" + * restricted-name-chars =/ "." ; Characters before first dot always + * ; specify a facet name + * restricted-name-chars =/ "+" ; Characters after last plus always + * ; specify a structured syntax suffix + * ALPHA = %x41-5A / %x61-7A ; A-Z / a-z + * DIGIT = %x30-39 ; 0-9 + */ +var subtypeNameRegExp = /^[A-Za-z0-9][A-Za-z0-9!#$&^_.-]{0,126}$/ +var typeNameRegExp = /^[A-Za-z0-9][A-Za-z0-9!#$&^_-]{0,126}$/ +var typeRegExp = /^ *([A-Za-z0-9][A-Za-z0-9!#$&^_-]{0,126})\/([A-Za-z0-9][A-Za-z0-9!#$&^_.+-]{0,126}) *$/; + +/** + * Module exports. + */ + +exports.format = format +exports.parse = parse + +/** + * Format object to media type. + * + * @param {object} obj + * @return {string} + * @api public + */ + +function format(obj) { + if (!obj || typeof obj !== 'object') { + throw new TypeError('argument obj is required') + } + + var parameters = obj.parameters + var subtype = obj.subtype + var suffix = obj.suffix + var type = obj.type + + if (!type || !typeNameRegExp.test(type)) { + throw new TypeError('invalid type') + } + + if (!subtype || !subtypeNameRegExp.test(subtype)) { + throw new TypeError('invalid subtype') + } + + // format as type/subtype + var string = type + '/' + subtype + + // append +suffix + if (suffix) { + if (!typeNameRegExp.test(suffix)) { + throw new TypeError('invalid suffix') + } + + string += '+' + suffix + } + + // append parameters + if (parameters && typeof parameters === 'object') { + var param + var params = Object.keys(parameters).sort() + + for (var i = 0; i < params.length; i++) { + param = params[i] + + if (!tokenRegExp.test(param)) { + throw new TypeError('invalid parameter name') + } + + string += '; ' + param + '=' + qstring(parameters[param]) + } + } + + return string +} + +/** + * Parse media type to object. 
+ * + * @param {string|object} string + * @return {Object} + * @api public + */ + +function parse(string) { + if (!string) { + throw new TypeError('argument string is required') + } + + // support req/res-like objects as argument + if (typeof string === 'object') { + string = getcontenttype(string) + } + + if (typeof string !== 'string') { + throw new TypeError('argument string is required to be a string') + } + + var index = string.indexOf(';') + var type = index !== -1 + ? string.substr(0, index) + : string + + var key + var match + var obj = splitType(type) + var params = {} + var value + + paramRegExp.lastIndex = index + + while (match = paramRegExp.exec(string)) { + if (match.index !== index) { + throw new TypeError('invalid parameter format') + } + + index += match[0].length + key = match[1].toLowerCase() + value = match[2] + + if (value[0] === '"') { + // remove quotes and escapes + value = value + .substr(1, value.length - 2) + .replace(qescRegExp, '$1') + } + + params[key] = value + } + + if (index !== -1 && index !== string.length) { + throw new TypeError('invalid parameter format') + } + + obj.parameters = params + + return obj +} + +/** + * Get content-type from req/res objects. + * + * @param {object} + * @return {Object} + * @api private + */ + +function getcontenttype(obj) { + if (typeof obj.getHeader === 'function') { + // res-like + return obj.getHeader('content-type') + } + + if (typeof obj.headers === 'object') { + // req-like + return obj.headers && obj.headers['content-type'] + } +} + +/** + * Quote a string if necessary. + * + * @param {string} val + * @return {string} + * @api private + */ + +function qstring(val) { + var str = String(val) + + // no need to quote tokens + if (tokenRegExp.test(str)) { + return str + } + + if (str.length > 0 && !textRegExp.test(str)) { + throw new TypeError('invalid parameter value') + } + + return '"' + str.replace(quoteRegExp, '\\$1') + '"' +} + +/** + * Simply "type/subtype+siffx" into parts. 
+ * + * @param {string} string + * @return {Object} + * @api private + */ + +function splitType(string) { + var match = typeRegExp.exec(string.toLowerCase()) + + if (!match) { + throw new TypeError('invalid media type') + } + + var type = match[1] + var subtype = match[2] + var suffix + + // suffix after last + + var index = subtype.lastIndexOf('+') + if (index !== -1) { + suffix = subtype.substr(index + 1) + subtype = subtype.substr(0, index) + } + + var obj = { + type: type, + subtype: subtype, + suffix: suffix + } + + return obj +} diff --git a/node_modules/media-typer/package.json b/node_modules/media-typer/package.json new file mode 100644 index 00000000..8cf3ebcd --- /dev/null +++ b/node_modules/media-typer/package.json @@ -0,0 +1,26 @@ +{ + "name": "media-typer", + "description": "Simple RFC 6838 media type parser and formatter", + "version": "0.3.0", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "repository": "jshttp/media-typer", + "devDependencies": { + "istanbul": "0.3.2", + "mocha": "~1.21.4", + "should": "~4.0.4" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/merge-descriptors/HISTORY.md b/node_modules/merge-descriptors/HISTORY.md new file mode 100644 index 00000000..486771f0 --- /dev/null +++ b/node_modules/merge-descriptors/HISTORY.md @@ -0,0 +1,21 @@ +1.0.1 / 2016-01-17 +================== + + * perf: enable strict mode + +1.0.0 / 2015-03-01 +================== + + * Add option to only add new descriptors + * Add simple argument validation + * Add jsdoc to source file + +0.0.2 / 2013-12-14 +================== + + * Move repository to `component` organization + +0.0.1 / 2013-10-29 +================== + + * Initial release diff --git a/node_modules/merge-descriptors/LICENSE b/node_modules/merge-descriptors/LICENSE new file mode 100644 index 00000000..274bfd82 --- /dev/null +++ b/node_modules/merge-descriptors/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2013 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
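Tying the media-typer `parse` and `format` halves above together, a brief round-trip sketch (the media type strings are arbitrary examples):

```js
var typer = require('media-typer');

var parsed = typer.parse('image/svg+xml; charset=utf-8');
console.log(parsed.type);       //=> 'image'
console.log(parsed.subtype);    //=> 'svg'
console.log(parsed.suffix);     //=> 'xml'
console.log(parsed.parameters); //=> { charset: 'utf-8' }

// format() is roughly the inverse; parameter values that are not plain
// tokens are quoted automatically.
console.log(typer.format({
  type: 'text',
  subtype: 'plain',
  parameters: {charset: 'utf-8', title: 'a b'}
}));
//=> 'text/plain; charset=utf-8; title="a b"'
```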
diff --git a/node_modules/merge-descriptors/README.md b/node_modules/merge-descriptors/README.md new file mode 100644 index 00000000..d593c0eb --- /dev/null +++ b/node_modules/merge-descriptors/README.md @@ -0,0 +1,48 @@ +# Merge Descriptors + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Merge objects using descriptors. + +```js +var thing = { + get name() { + return 'jon' + } +} + +var animal = { + +} + +merge(animal, thing) + +animal.name === 'jon' +``` + +## API + +### merge(destination, source) + +Redefines `destination`'s descriptors with `source`'s. + +### merge(destination, source, false) + +Defines `source`'s descriptors on `destination` if `destination` does not have +a descriptor by the same name. + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/merge-descriptors.svg +[npm-url]: https://npmjs.org/package/merge-descriptors +[travis-image]: https://img.shields.io/travis/component/merge-descriptors/master.svg +[travis-url]: https://travis-ci.org/component/merge-descriptors +[coveralls-image]: https://img.shields.io/coveralls/component/merge-descriptors/master.svg +[coveralls-url]: https://coveralls.io/r/component/merge-descriptors?branch=master +[downloads-image]: https://img.shields.io/npm/dm/merge-descriptors.svg +[downloads-url]: https://npmjs.org/package/merge-descriptors diff --git a/node_modules/merge-descriptors/index.js b/node_modules/merge-descriptors/index.js new file mode 100644 index 00000000..573b132e --- /dev/null +++ b/node_modules/merge-descriptors/index.js @@ -0,0 +1,60 @@ +/*! + * merge-descriptors + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = merge + +/** + * Module variables. 
+ * @private + */ + +var hasOwnProperty = Object.prototype.hasOwnProperty + +/** + * Merge the property descriptors of `src` into `dest` + * + * @param {object} dest Object to add descriptors to + * @param {object} src Object to clone descriptors from + * @param {boolean} [redefine=true] Redefine `dest` properties with `src` properties + * @returns {object} Reference to dest + * @public + */ + +function merge(dest, src, redefine) { + if (!dest) { + throw new TypeError('argument dest is required') + } + + if (!src) { + throw new TypeError('argument src is required') + } + + if (redefine === undefined) { + // Default to true + redefine = true + } + + Object.getOwnPropertyNames(src).forEach(function forEachOwnPropertyName(name) { + if (!redefine && hasOwnProperty.call(dest, name)) { + // Skip desriptor + return + } + + // Copy descriptor + var descriptor = Object.getOwnPropertyDescriptor(src, name) + Object.defineProperty(dest, name, descriptor) + }) + + return dest +} diff --git a/node_modules/merge-descriptors/package.json b/node_modules/merge-descriptors/package.json new file mode 100644 index 00000000..514cdbd8 --- /dev/null +++ b/node_modules/merge-descriptors/package.json @@ -0,0 +1,32 @@ +{ + "name": "merge-descriptors", + "description": "Merge objects using descriptors", + "version": "1.0.1", + "author": { + "name": "Jonathan Ong", + "email": "me@jongleberry.com", + "url": "http://jongleberry.com", + "twitter": "https://twitter.com/jongleberry" + }, + "contributors": [ + "Douglas Christopher Wilson ", + "Mike Grabowski " + ], + "license": "MIT", + "repository": "component/merge-descriptors", + "devDependencies": { + "istanbul": "0.4.1", + "mocha": "1.21.5" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "index.js" + ], + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-ci": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/" + } +} diff --git a/node_modules/methods/HISTORY.md b/node_modules/methods/HISTORY.md new file mode 100644 index 00000000..c0ecf072 --- /dev/null +++ b/node_modules/methods/HISTORY.md @@ -0,0 +1,29 @@ +1.1.2 / 2016-01-17 +================== + + * perf: enable strict mode + +1.1.1 / 2014-12-30 +================== + + * Improve `browserify` support + +1.1.0 / 2014-07-05 +================== + + * Add `CONNECT` method + +1.0.1 / 2014-06-02 +================== + + * Fix module to work with harmony transform + +1.0.0 / 2014-05-08 +================== + + * Add `PURGE` method + +0.1.0 / 2013-10-28 +================== + + * Add `http.METHODS` support diff --git a/node_modules/methods/LICENSE b/node_modules/methods/LICENSE new file mode 100644 index 00000000..220dc1a2 --- /dev/null +++ b/node_modules/methods/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2013-2014 TJ Holowaychuk +Copyright (c) 2015-2016 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or 
substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/methods/README.md b/node_modules/methods/README.md new file mode 100644 index 00000000..672a32bf --- /dev/null +++ b/node_modules/methods/README.md @@ -0,0 +1,51 @@ +# Methods + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +HTTP verbs that Node.js core's HTTP parser supports. + +This module provides an export that is just like `http.METHODS` from Node.js core, +with the following differences: + + * All method names are lower-cased. + * Contains a fallback list of methods for Node.js versions that do not have a + `http.METHODS` export (0.10 and lower). + * Provides the fallback list when using tools like `browserify` without pulling + in the `http` shim module. + +## Install + +```bash +$ npm install methods +``` + +## API + +```js +var methods = require('methods') +``` + +### methods + +This is an array of lower-cased method names that Node.js supports. If Node.js +provides the `http.METHODS` export, then this is the same array lower-cased, +otherwise it is a snapshot of the verbs from Node.js 0.10. + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/methods.svg?style=flat +[npm-url]: https://npmjs.org/package/methods +[node-version-image]: https://img.shields.io/node/v/methods.svg?style=flat +[node-version-url]: https://nodejs.org/en/download/ +[travis-image]: https://img.shields.io/travis/jshttp/methods.svg?style=flat +[travis-url]: https://travis-ci.org/jshttp/methods +[coveralls-image]: https://img.shields.io/coveralls/jshttp/methods.svg?style=flat +[coveralls-url]: https://coveralls.io/r/jshttp/methods?branch=master +[downloads-image]: https://img.shields.io/npm/dm/methods.svg?style=flat +[downloads-url]: https://npmjs.org/package/methods diff --git a/node_modules/methods/index.js b/node_modules/methods/index.js new file mode 100644 index 00000000..667a50bd --- /dev/null +++ b/node_modules/methods/index.js @@ -0,0 +1,69 @@ +/*! + * methods + * Copyright(c) 2013-2014 TJ Holowaychuk + * Copyright(c) 2015-2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. + * @private + */ + +var http = require('http'); + +/** + * Module exports. + * @public + */ + +module.exports = getCurrentNodeMethods() || getBasicNodeMethods(); + +/** + * Get the current Node.js methods. + * @private + */ + +function getCurrentNodeMethods() { + return http.METHODS && http.METHODS.map(function lowerCaseMethod(method) { + return method.toLowerCase(); + }); +} + +/** + * Get the "basic" Node.js methods, a snapshot from Node.js 0.10. 
+ * @private + */ + +function getBasicNodeMethods() { + return [ + 'get', + 'post', + 'put', + 'head', + 'delete', + 'options', + 'trace', + 'copy', + 'lock', + 'mkcol', + 'move', + 'purge', + 'propfind', + 'proppatch', + 'unlock', + 'report', + 'mkactivity', + 'checkout', + 'merge', + 'm-search', + 'notify', + 'subscribe', + 'unsubscribe', + 'patch', + 'search', + 'connect' + ]; +} diff --git a/node_modules/methods/package.json b/node_modules/methods/package.json new file mode 100644 index 00000000..c4ce6f05 --- /dev/null +++ b/node_modules/methods/package.json @@ -0,0 +1,36 @@ +{ + "name": "methods", + "description": "HTTP methods that node supports", + "version": "1.1.2", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)", + "TJ Holowaychuk (http://tjholowaychuk.com)" + ], + "license": "MIT", + "repository": "jshttp/methods", + "devDependencies": { + "istanbul": "0.4.1", + "mocha": "1.21.5" + }, + "files": [ + "index.js", + "HISTORY.md", + "LICENSE" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + }, + "browser": { + "http": false + }, + "keywords": [ + "http", + "methods" + ] +} diff --git a/node_modules/micromatch/CHANGELOG.md b/node_modules/micromatch/CHANGELOG.md new file mode 100644 index 00000000..9d8e5ed0 --- /dev/null +++ b/node_modules/micromatch/CHANGELOG.md @@ -0,0 +1,37 @@ +## History + +### key + +Changelog entries are classified using the following labels _(from [keep-a-changelog][]_): + +- `added`: for new features +- `changed`: for changes in existing functionality +- `deprecated`: for once-stable features removed in upcoming releases +- `removed`: for deprecated features removed in this release +- `fixed`: for any bug fixes +- `bumped`: updated dependencies, only minor or higher will be listed. + +### [3.0.0] - 2017-04-11 + +TODO. There should be no breaking changes. Please report any regressions. I will [reformat these release notes](https://github.com/micromatch/micromatch/pull/76) and add them to the changelog as soon as I have a chance. + +### [1.0.1] - 2016-12-12 + +**Added** + +- Support for windows path edge cases where backslashes are used in brackets or other unusual combinations. + +### [1.0.0] - 2016-12-12 + +Stable release. + +### [0.1.0] - 2016-10-08 + +First release. + + +[Unreleased]: https://github.com/jonschlinkert/micromatch/compare/0.1.0...HEAD +[0.2.0]: https://github.com/jonschlinkert/micromatch/compare/0.1.0...0.2.0 + +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog + diff --git a/node_modules/micromatch/LICENSE b/node_modules/micromatch/LICENSE new file mode 100755 index 00000000..d32ab442 --- /dev/null +++ b/node_modules/micromatch/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2018, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/micromatch/README.md b/node_modules/micromatch/README.md new file mode 100644 index 00000000..5dfa1498 --- /dev/null +++ b/node_modules/micromatch/README.md @@ -0,0 +1,1150 @@ +# micromatch [![NPM version](https://img.shields.io/npm/v/micromatch.svg?style=flat)](https://www.npmjs.com/package/micromatch) [![NPM monthly downloads](https://img.shields.io/npm/dm/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![NPM total downloads](https://img.shields.io/npm/dt/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![Linux Build Status](https://img.shields.io/travis/micromatch/micromatch.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/micromatch) [![Windows Build Status](https://img.shields.io/appveyor/ci/micromatch/micromatch.svg?style=flat&label=AppVeyor)](https://ci.appveyor.com/project/micromatch/micromatch) + +> Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Table of Contents + +
+Details + +- [Install](#install) +- [Quickstart](#quickstart) +- [Why use micromatch?](#why-use-micromatch) + * [Matching features](#matching-features) +- [Switching to micromatch](#switching-to-micromatch) + * [From minimatch](#from-minimatch) + * [From multimatch](#from-multimatch) +- [API](#api) +- [Options](#options) + * [options.basename](#optionsbasename) + * [options.bash](#optionsbash) + * [options.cache](#optionscache) + * [options.dot](#optionsdot) + * [options.failglob](#optionsfailglob) + * [options.ignore](#optionsignore) + * [options.matchBase](#optionsmatchbase) + * [options.nobrace](#optionsnobrace) + * [options.nocase](#optionsnocase) + * [options.nodupes](#optionsnodupes) + * [options.noext](#optionsnoext) + * [options.nonegate](#optionsnonegate) + * [options.noglobstar](#optionsnoglobstar) + * [options.nonull](#optionsnonull) + * [options.nullglob](#optionsnullglob) + * [options.snapdragon](#optionssnapdragon) + * [options.sourcemap](#optionssourcemap) + * [options.unescape](#optionsunescape) + * [options.unixify](#optionsunixify) +- [Extended globbing](#extended-globbing) + * [extglobs](#extglobs) + * [braces](#braces) + * [regex character classes](#regex-character-classes) + * [regex groups](#regex-groups) + * [POSIX bracket expressions](#posix-bracket-expressions) +- [Notes](#notes) + * [Bash 4.3 parity](#bash-43-parity) + * [Backslashes](#backslashes) +- [Contributing](#contributing) +- [Benchmarks](#benchmarks) + * [Running benchmarks](#running-benchmarks) + * [Latest results](#latest-results) +- [About](#about) + +
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save micromatch +``` + +## Quickstart + +```js +var mm = require('micromatch'); +mm(list, patterns[, options]); +``` + +The [main export](#micromatch) takes a list of strings and one or more glob patterns: + +```js +console.log(mm(['foo', 'bar', 'qux'], ['f*', 'b*'])); +//=> ['foo', 'bar'] +``` + +Use [.isMatch()](#ismatch) to get true/false: + +```js +console.log(mm.isMatch('foo', 'f*')); +//=> true +``` + +[Switching](#switching-to-micromatch) from minimatch and multimatch is easy! + +## Why use micromatch? + +> micromatch is a [drop-in replacement](#switching-to-micromatch) for minimatch and multimatch + +* Supports all of the same matching features as [minimatch](https://github.com/isaacs/minimatch) and [multimatch](https://github.com/sindresorhus/multimatch) +* Micromatch uses [snapdragon](https://github.com/jonschlinkert/snapdragon) for parsing and compiling globs, which provides granular control over the entire conversion process in a way that is easy to understand, reason about, and maintain. +* More consistently accurate matching [than minimatch](https://github.com/yarnpkg/yarn/pull/3339), with more than 36,000 [test assertions](./test) to prove it. +* More complete support for the Bash 4.3 specification than minimatch and multimatch. In fact, micromatch passes _all of the spec tests_ from bash, including some that bash still fails. +* [Faster matching](#benchmarks), from a combination of optimized glob patterns, faster algorithms, and regex caching. +* [Micromatch is safer](https://github.com/micromatch/braces#braces-is-safe), and is not subject to DoS with brace patterns, like minimatch and multimatch. +* More reliable windows support than minimatch and multimatch. + +### Matching features + +* Support for multiple glob patterns (no need for wrappers like multimatch) +* Wildcards (`**`, `*.js`) +* Negation (`'!a/*.js'`, `'*!(b).js']`) +* [extglobs](https://github.com/micromatch/extglob) (`+(x|y)`, `!(a|b)`) +* [POSIX character classes](https://github.com/micromatch/expand-brackets) (`[[:alpha:][:digit:]]`) +* [brace expansion](https://github.com/micromatch/braces) (`foo/{1..5}.md`, `bar/{a,b,c}.js`) +* regex character classes (`foo-[1-5].js`) +* regex logical "or" (`foo/(abc|xyz).js`) + +You can mix and match these features to create whatever patterns you need! + +## Switching to micromatch + +There is one notable difference between micromatch and minimatch in regards to how backslashes are handled. See [the notes about backslashes](#backslashes) for more information. + +### From minimatch + +Use [mm.isMatch()](#ismatch) instead of `minimatch()`: + +```js +mm.isMatch('foo', 'b*'); +//=> false +``` + +Use [mm.match()](#match) instead of `minimatch.match()`: + +```js +mm.match(['foo', 'bar'], 'b*'); +//=> 'bar' +``` + +### From multimatch + +Same signature: + +```js +mm(['foo', 'bar', 'baz'], ['f*', '*z']); +//=> ['foo', 'baz'] +``` + +## API + +### [micromatch](index.js#L41) + +The main function takes a list of strings and one or more glob patterns to use for matching. + +**Params** + +* `list` **{Array}**: A list of strings to match +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. 
+* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Array}**: Returns an array of matches + +**Example** + +```js +var mm = require('micromatch'); +mm(list, patterns[, options]); + +console.log(mm(['a.js', 'a.txt'], ['*.js'])); +//=> [ 'a.js' ] +``` + +### [.match](index.js#L93) + +Similar to the main function, but `pattern` must be a string. + +**Params** + +* `list` **{Array}**: Array of strings to match +* `pattern` **{String}**: Glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Array}**: Returns an array of matches + +**Example** + +```js +var mm = require('micromatch'); +mm.match(list, pattern[, options]); + +console.log(mm.match(['a.a', 'a.aa', 'a.b', 'a.c'], '*.a')); +//=> ['a.a', 'a.aa'] +``` + +### [.isMatch](index.js#L154) + +Returns true if the specified `string` matches the given glob `pattern`. + +**Params** + +* `string` **{String}**: String to match +* `pattern` **{String}**: Glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if the string matches the glob pattern. + +**Example** + +```js +var mm = require('micromatch'); +mm.isMatch(string, pattern[, options]); + +console.log(mm.isMatch('a.a', '*.a')); +//=> true +console.log(mm.isMatch('a.b', '*.a')); +//=> false +``` + +### [.some](index.js#L192) + +Returns true if some of the strings in the given `list` match any of the given glob `patterns`. + +**Params** + +* `list` **{String|Array}**: The string or array of strings to test. Returns as soon as the first match is found. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +var mm = require('micromatch'); +mm.some(list, patterns[, options]); + +console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); +// true +console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); +// false +``` + +### [.every](index.js#L228) + +Returns true if every string in the given `list` matches any of the given glob `patterns`. + +**Params** + +* `list` **{String|Array}**: The string or array of strings to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +var mm = require('micromatch'); +mm.every(list, patterns[, options]); + +console.log(mm.every('foo.js', ['foo.js'])); +// true +console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); +// true +console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); +// false +console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); +// false +``` + +### [.any](index.js#L260) + +Returns true if **any** of the given glob `patterns` match the specified `string`. + +**Params** + +* `str` **{String|Array}**: The string to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. 
+* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +var mm = require('micromatch'); +mm.any(string, patterns[, options]); + +console.log(mm.any('a.a', ['b.*', '*.a'])); +//=> true +console.log(mm.any('a.a', 'b.*')); +//=> false +``` + +### [.all](index.js#L308) + +Returns true if **all** of the given `patterns` match the specified string. + +**Params** + +* `str` **{String|Array}**: The string to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +var mm = require('micromatch'); +mm.all(string, patterns[, options]); + +console.log(mm.all('foo.js', ['foo.js'])); +// true + +console.log(mm.all('foo.js', ['*.js', '!foo.js'])); +// false + +console.log(mm.all('foo.js', ['*.js', 'foo.js'])); +// true + +console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); +// true +``` + +### [.not](index.js#L340) + +Returns a list of strings that _**do not match any**_ of the given `patterns`. + +**Params** + +* `list` **{Array}**: Array of strings to match. +* `patterns` **{String|Array}**: One or more glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Array}**: Returns an array of strings that **do not match** the given patterns. + +**Example** + +```js +var mm = require('micromatch'); +mm.not(list, patterns[, options]); + +console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); +//=> ['b.b', 'c.c'] +``` + +### [.contains](index.js#L376) + +Returns true if the given `string` contains the given pattern. Similar to [.isMatch](#isMatch) but the pattern can match any part of the string. + +**Params** + +* `str` **{String}**: The string to match. +* `patterns` **{String|Array}**: Glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if the patter matches any part of `str`. + +**Example** + +```js +var mm = require('micromatch'); +mm.contains(string, pattern[, options]); + +console.log(mm.contains('aa/bb/cc', '*b')); +//=> true +console.log(mm.contains('aa/bb/cc', '*d')); +//=> false +``` + +### [.matchKeys](index.js#L432) + +Filter the keys of the given object with the given `glob` pattern and `options`. Does not attempt to match nested keys. If you need this feature, use [glob-object](https://github.com/jonschlinkert/glob-object) instead. + +**Params** + +* `object` **{Object}**: The object with keys to filter. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Object}**: Returns an object with only keys that match the given patterns. + +**Example** + +```js +var mm = require('micromatch'); +mm.matchKeys(object, patterns[, options]); + +var obj = { aa: 'a', ab: 'b', ac: 'c' }; +console.log(mm.matchKeys(obj, '*b')); +//=> { ab: 'b' } +``` + +### [.matcher](index.js#L461) + +Returns a memoized matcher function from the given glob `pattern` and `options`. The returned function takes a string to match as its only argument and returns true if the string is a match. 
+ +**Params** + +* `pattern` **{String}**: Glob pattern +* `options` **{Object}**: See available [options](#options) for changing how matches are performed. +* `returns` **{Function}**: Returns a matcher function. + +**Example** + +```js +var mm = require('micromatch'); +mm.matcher(pattern[, options]); + +var isMatch = mm.matcher('*.!(*a)'); +console.log(isMatch('a.a')); +//=> false +console.log(isMatch('a.b')); +//=> true +``` + +### [.capture](index.js#L536) + +Returns an array of matches captured by `pattern` in `string, or`null` if the pattern did not match. + +**Params** + +* `pattern` **{String}**: Glob pattern to use for matching. +* `string` **{String}**: String to match +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns an array of captures if the string matches the glob pattern, otherwise `null`. + +**Example** + +```js +var mm = require('micromatch'); +mm.capture(pattern, string[, options]); + +console.log(mm.capture('test/*.js', 'test/foo.js')); +//=> ['foo'] +console.log(mm.capture('test/*.js', 'foo/bar.css')); +//=> null +``` + +### [.makeRe](index.js#L571) + +Create a regular expression from the given glob `pattern`. + +**Params** + +* `pattern` **{String}**: A glob pattern to convert to regex. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed. +* `returns` **{RegExp}**: Returns a regex created from the given pattern. + +**Example** + +```js +var mm = require('micromatch'); +mm.makeRe(pattern[, options]); + +console.log(mm.makeRe('*.js')); +//=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ +``` + +### [.braces](index.js#L618) + +Expand the given brace `pattern`. + +**Params** + +* `pattern` **{String}**: String with brace pattern to expand. +* `options` **{Object}**: Any [options](#options) to change how expansion is performed. See the [braces](https://github.com/micromatch/braces) library for all available options. +* `returns` **{Array}** + +**Example** + +```js +var mm = require('micromatch'); +console.log(mm.braces('foo/{a,b}/bar')); +//=> ['foo/(a|b)/bar'] + +console.log(mm.braces('foo/{a,b}/bar', {expand: true})); +//=> ['foo/(a|b)/bar'] +``` + +### [.create](index.js#L685) + +Parses the given glob `pattern` and returns an array of abstract syntax trees (ASTs), with the compiled `output` and optional source `map` on each AST. + +**Params** + +* `pattern` **{String}**: Glob pattern to parse and compile. +* `options` **{Object}**: Any [options](#options) to change how parsing and compiling is performed. +* `returns` **{Object}**: Returns an object with the parsed AST, compiled string and optional source map. + +**Example** + +```js +var mm = require('micromatch'); +mm.create(pattern[, options]); + +console.log(mm.create('abc/*.js')); +// [{ options: { source: 'string', sourcemap: true }, +// state: {}, +// compilers: +// { ... }, +// output: '(\\.[\\\\\\/])?abc\\/(?!\\.)(?=.)[^\\/]*?\\.js', +// ast: +// { type: 'root', +// errors: [], +// nodes: +// [ ... ], +// dot: false, +// input: 'abc/*.js' }, +// parsingErrors: [], +// map: +// { version: 3, +// sources: [ 'string' ], +// names: [], +// mappings: 'AAAA,GAAG,EAAC,kBAAC,EAAC,EAAE', +// sourcesContent: [ 'abc/*.js' ] }, +// position: { line: 1, column: 28 }, +// content: {}, +// files: {}, +// idx: 6 }] +``` + +### [.parse](index.js#L732) + +Parse the given `str` with the given `options`. 
+ +**Params** + +* `str` **{String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an AST + +**Example** + +```js +var mm = require('micromatch'); +mm.parse(pattern[, options]); + +var ast = mm.parse('a/{b,c}/d'); +console.log(ast); +// { type: 'root', +// errors: [], +// input: 'a/{b,c}/d', +// nodes: +// [ { type: 'bos', val: '' }, +// { type: 'text', val: 'a/' }, +// { type: 'brace', +// nodes: +// [ { type: 'brace.open', val: '{' }, +// { type: 'text', val: 'b,c' }, +// { type: 'brace.close', val: '}' } ] }, +// { type: 'text', val: '/d' }, +// { type: 'eos', val: '' } ] } +``` + +### [.compile](index.js#L780) + +Compile the given `ast` or string with the given `options`. + +**Params** + +* `ast` **{Object|String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an object that has an `output` property with the compiled string. + +**Example** + +```js +var mm = require('micromatch'); +mm.compile(ast[, options]); + +var ast = mm.parse('a/{b,c}/d'); +console.log(mm.compile(ast)); +// { options: { source: 'string' }, +// state: {}, +// compilers: +// { eos: [Function], +// noop: [Function], +// bos: [Function], +// brace: [Function], +// 'brace.open': [Function], +// text: [Function], +// 'brace.close': [Function] }, +// output: [ 'a/(b|c)/d' ], +// ast: +// { ... }, +// parsingErrors: [] } +``` + +### [.clearCache](index.js#L801) + +Clear the regex cache. + +**Example** + +```js +mm.clearCache(); +``` + +## Options + +* [basename](#optionsbasename) +* [bash](#optionsbash) +* [cache](#optionscache) +* [dot](#optionsdot) +* [failglob](#optionsfailglob) +* [ignore](#optionsignore) +* [matchBase](#optionsmatchBase) +* [nobrace](#optionsnobrace) +* [nocase](#optionsnocase) +* [nodupes](#optionsnodupes) +* [noext](#optionsnoext) +* [noglobstar](#optionsnoglobstar) +* [nonull](#optionsnonull) +* [nullglob](#optionsnullglob) +* [snapdragon](#optionssnapdragon) +* [sourcemap](#optionssourcemap) +* [unescape](#optionsunescape) +* [unixify](#optionsunixify) + +### options.basename + +Allow glob patterns without slashes to match a file path based on its basename. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `matchBase`. + +**Type**: `Boolean` + +**Default**: `false` + +**Example** + +```js +mm(['a/b.js', 'a/c.md'], '*.js'); +//=> [] + +mm(['a/b.js', 'a/c.md'], '*.js', {matchBase: true}); +//=> ['a/b.js'] +``` + +### options.bash + +Enabled by default, this option enforces bash-like behavior with stars immediately following a bracket expression. Bash bracket expressions are similar to regex character classes, but unlike regex, a star following a bracket expression **does not repeat the bracketed characters**. Instead, the star is treated the same as an other star. + +**Type**: `Boolean` + +**Default**: `true` + +**Example** + +```js +var files = ['abc', 'ajz']; +console.log(mm(files, '[a-c]*')); +//=> ['abc', 'ajz'] + +console.log(mm(files, '[a-c]*', {bash: false})); +``` + +### options.cache + +Disable regex and function memoization. + +**Type**: `Boolean` + +**Default**: `undefined` + +### options.dot + +Match dotfiles. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `dot`. + +**Type**: `Boolean` + +**Default**: `false` + +### options.failglob + +Similar to the `--failglob` behavior in Bash, throws an error when no matches are found. + +**Type**: `Boolean` + +**Default**: `undefined` + +### options.ignore + +String or array of glob patterns to match files to ignore. 
+ +**Type**: `String|Array` + +**Default**: `undefined` + +### options.matchBase + +Alias for [options.basename](#options-basename). + +### options.nobrace + +Disable expansion of brace patterns. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `nobrace`. + +**Type**: `Boolean` + +**Default**: `undefined` + +See [braces](https://github.com/micromatch/braces) for more information about extended brace expansion. + +### options.nocase + +Use a case-insensitive regex for matching files. Same behavior as [minimatch](https://github.com/isaacs/minimatch). + +**Type**: `Boolean` + +**Default**: `undefined` + +### options.nodupes + +Remove duplicate elements from the result array. + +**Type**: `Boolean` + +**Default**: `undefined` + +**Example** + +Example of using the `unescape` and `nodupes` options together: + +```js +mm.match(['a/b/c', 'a/b/c'], 'a/b/c'); +//=> ['a/b/c', 'a/b/c'] + +mm.match(['a/b/c', 'a/b/c'], 'a/b/c', {nodupes: true}); +//=> ['abc'] +``` + +### options.noext + +Disable extglob support, so that extglobs are regarded as literal characters. + +**Type**: `Boolean` + +**Default**: `undefined` + +**Examples** + +```js +mm(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)'); +//=> ['a/b', 'a/!(z)'] + +mm(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)', {noext: true}); +//=> ['a/!(z)'] (matches only as literal characters) +``` + +### options.nonegate + +Disallow negation (`!`) patterns, and treat leading `!` as a literal character to match. + +**Type**: `Boolean` + +**Default**: `undefined` + +### options.noglobstar + +Disable matching with globstars (`**`). + +**Type**: `Boolean` + +**Default**: `undefined` + +```js +mm(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**'); +//=> ['a/b', 'a/b/c', 'a/b/c/d'] + +mm(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**', {noglobstar: true}); +//=> ['a/b'] +``` + +### options.nonull + +Alias for [options.nullglob](#options-nullglob). + +### options.nullglob + +If `true`, when no matches are found the actual (arrayified) glob pattern is returned instead of an empty array. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `nonull`. + +**Type**: `Boolean` + +**Default**: `undefined` + +### options.snapdragon + +Pass your own instance of [snapdragon](https://github.com/jonschlinkert/snapdragon), to customize parsers or compilers. + +**Type**: `Object` + +**Default**: `undefined` + +### options.sourcemap + +Generate a source map by enabling the `sourcemap` option with the `.parse`, `.compile`, or `.create` methods. + +_(Note that sourcemaps are currently not enabled for brace patterns)_ + +**Examples** + +``` js +var mm = require('micromatch'); +var pattern = '*(*(of*(a)x)z)'; + +var res = mm.create('abc/*.js', {sourcemap: true}); +console.log(res.map); +// { version: 3, +// sources: [ 'string' ], +// names: [], +// mappings: 'AAAA,GAAG,EAAC,iBAAC,EAAC,EAAE', +// sourcesContent: [ 'abc/*.js' ] } + +var ast = mm.parse('abc/**/*.js'); +var res = mm.compile(ast, {sourcemap: true}); +console.log(res.map); +// { version: 3, +// sources: [ 'string' ], +// names: [], +// mappings: 'AAAA,GAAG,EAAC,2BAAE,EAAC,iBAAC,EAAC,EAAE', +// sourcesContent: [ 'abc/**/*.js' ] } + +var ast = mm.parse(pattern); +var res = mm.compile(ast, {sourcemap: true}); +console.log(res.map); +// { version: 3, +// sources: [ 'string' ], +// names: [], +// mappings: 'AAAA,CAAE,CAAE,EAAE,CAAE,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC', +// sourcesContent: [ '*(*(of*(a)x)z)' ] } +``` + +### options.unescape + +Remove backslashes from returned matches. 
+ +**Type**: `Boolean` + +**Default**: `undefined` + +**Example** + +In this example we want to match a literal `*`: + +```js +mm.match(['abc', 'a\\*c'], 'a\\*c'); +//=> ['a\\*c'] + +mm.match(['abc', 'a\\*c'], 'a\\*c', {unescape: true}); +//=> ['a*c'] +``` + +### options.unixify + +Convert path separators on returned files to posix/unix-style forward slashes. + +**Type**: `Boolean` + +**Default**: `true` on windows, `false` everywhere else + +**Example** + +```js +mm.match(['a\\b\\c'], 'a/**'); +//=> ['a/b/c'] + +mm.match(['a\\b\\c'], {unixify: false}); +//=> ['a\\b\\c'] +``` + +## Extended globbing + +Micromatch also supports extended globbing features. + +### extglobs + +Extended globbing, as described by the bash man page: + +| **pattern** | **regex equivalent** | **description** | +| --- | --- | --- | +| `?(pattern)` | `(pattern)?` | Matches zero or one occurrence of the given patterns | +| `*(pattern)` | `(pattern)*` | Matches zero or more occurrences of the given patterns | +| `+(pattern)` | `(pattern)+` | Matches one or more occurrences of the given patterns | +| `@(pattern)` | `(pattern)` * | Matches one of the given patterns | +| `!(pattern)` | N/A (equivalent regex is much more complicated) | Matches anything except one of the given patterns | + +* Note that `@` isn't a RegEx character. + +Powered by [extglob](https://github.com/micromatch/extglob). Visit that library for the full range of options or to report extglob related issues. + +### braces + +Brace patterns can be used to match specific ranges or sets of characters. For example, the pattern `*/{1..3}/*` would match any of following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +baz/1/qux +baz/2/qux +baz/3/qux +``` + +Visit [braces](https://github.com/micromatch/braces) to see the full range of features and options related to brace expansion, or to create brace matching or expansion related issues. + +### regex character classes + +Given the list: `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`: + +* `[ac].js`: matches both `a` and `c`, returning `['a.js', 'c.js']` +* `[b-d].js`: matches from `b` to `d`, returning `['b.js', 'c.js', 'd.js']` +* `[b-d].js`: matches from `b` to `d`, returning `['b.js', 'c.js', 'd.js']` +* `a/[A-Z].js`: matches and uppercase letter, returning `['a/E.md']` + +Learn about [regex character classes](http://www.regular-expressions.info/charclass.html). + +### regex groups + +Given `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`: + +* `(a|c).js`: would match either `a` or `c`, returning `['a.js', 'c.js']` +* `(b|d).js`: would match either `b` or `d`, returning `['b.js', 'd.js']` +* `(b|[A-Z]).js`: would match either `b` or an uppercase letter, returning `['b.js', 'E.js']` + +As with regex, parens can be nested, so patterns like `((a|b)|c)/b` will work. Although brace expansion might be friendlier to use, depending on preference. + +### POSIX bracket expressions + +POSIX brackets are intended to be more user-friendly than regex character classes. This of course is in the eye of the beholder. + +**Example** + +```js +mm.isMatch('a1', '[[:alpha:][:digit:]]'); +//=> true + +mm.isMatch('a1', '[[:alpha:][:alpha:]]'); +//=> false +``` + +See [expand-brackets](https://github.com/jonschlinkert/expand-brackets) for more information about bracket expressions. + +*** + +## Notes + +### Bash 4.3 parity + +Whenever possible matching behavior is based on behavior Bash 4.3, which is mostly consistent with minimatch. 
+ +However, it's suprising how many edge cases and rabbit holes there are with glob matching, and since there is no real glob specification, and micromatch is more accurate than both Bash and minimatch, there are cases where best-guesses were made for behavior. In a few cases where Bash had no answers, we used wildmatch (used by git) as a fallback. + +### Backslashes + +There is an important, notable difference between minimatch and micromatch _in regards to how backslashes are handled_ in glob patterns. + +* Micromatch exclusively and explicitly reserves backslashes for escaping characters in a glob pattern, even on windows. This is consistent with bash behavior. +* Minimatch converts all backslashes to forward slashes, which means you can't use backslashes to escape any characters in your glob patterns. + +We made this decision for micromatch for a couple of reasons: + +* consistency with bash conventions. +* glob patterns are not filepaths. They are a type of [regular language](https://en.wikipedia.org/wiki/Regular_language) that is converted to a JavaScript regular expression. Thus, when forward slashes are defined in a glob pattern, the resulting regular expression will match windows or POSIX path separators just fine. + +**A note about joining paths to globs** + +Note that when you pass something like `path.join('foo', '*')` to micromatch, you are creating a filepath and expecting it to still work as a glob pattern. This causes problems on windows, since the `path.sep` is `\\`. + +In other words, since `\\` is reserved as an escape character in globs, on windows `path.join('foo', '*')` would result in `foo\\*`, which tells micromatch to match `*` as a literal character. This is the same behavior as bash. + +## Contributing + +All contributions are welcome! Please read [the contributing guide](.github/contributing.md) to get started. + +**Bug reports** + +Please create an issue if you encounter a bug or matching behavior that doesn't seem correct. If you find a matching-related issue, please: + +* [research existing issues first](../../issues) (open and closed) +* visit the [GNU Bash documentation](https://www.gnu.org/software/bash/manual/) to see how Bash deals with the pattern +* visit the [minimatch](https://github.com/isaacs/minimatch) documentation to cross-check expected behavior in node.js +* if all else fails, since there is no real specification for globs we will probably need to discuss expected behavior and decide how to resolve it. which means any detail you can provide to help with this discussion would be greatly appreciated. + +**Platform issues** + +It's important to us that micromatch work consistently on all platforms. If you encounter any platform-specific matching or path related issues, please let us know (pull requests are also greatly appreciated). + +## Benchmarks + +### Running benchmarks + +Install dev dependencies: + +```bash +npm i -d && npm run benchmark +``` + +### Latest results + +As of February 18, 2018 (longer bars are better): + +```sh +# braces-globstar-large-list (485691 bytes) + micromatch ██████████████████████████████████████████████████ (517 ops/sec ±0.49%) + minimatch █ (18.92 ops/sec ±0.54%) + multimatch █ (18.94 ops/sec ±0.62%) + + micromatch is faster by an avg. of 2,733% + +# braces-multiple (3362 bytes) + micromatch ██████████████████████████████████████████████████ (33,625 ops/sec ±0.45%) + minimatch (2.92 ops/sec ±3.26%) + multimatch (2.90 ops/sec ±2.76%) + + micromatch is faster by an avg. 
of 1,156,935% + +# braces-range (727 bytes) + micromatch █████████████████████████████████████████████████ (155,220 ops/sec ±0.56%) + minimatch ██████ (20,186 ops/sec ±1.27%) + multimatch ██████ (19,809 ops/sec ±0.60%) + + micromatch is faster by an avg. of 776% + +# braces-set (2858 bytes) + micromatch █████████████████████████████████████████████████ (24,354 ops/sec ±0.92%) + minimatch █████ (2,566 ops/sec ±0.56%) + multimatch ████ (2,431 ops/sec ±1.25%) + + micromatch is faster by an avg. of 975% + +# globstar-large-list (485686 bytes) + micromatch █████████████████████████████████████████████████ (504 ops/sec ±0.45%) + minimatch ███ (33.36 ops/sec ±1.08%) + multimatch ███ (33.19 ops/sec ±1.35%) + + micromatch is faster by an avg. of 1,514% + +# globstar-long-list (90647 bytes) + micromatch ██████████████████████████████████████████████████ (2,694 ops/sec ±1.08%) + minimatch ████████████████ (870 ops/sec ±1.09%) + multimatch ████████████████ (862 ops/sec ±0.84%) + + micromatch is faster by an avg. of 311% + +# globstar-short-list (182 bytes) + micromatch ██████████████████████████████████████████████████ (328,921 ops/sec ±1.06%) + minimatch █████████ (64,808 ops/sec ±1.42%) + multimatch ████████ (57,991 ops/sec ±2.11%) + + micromatch is faster by an avg. of 536% + +# no-glob (701 bytes) + micromatch █████████████████████████████████████████████████ (415,935 ops/sec ±0.36%) + minimatch ███████████ (92,730 ops/sec ±1.44%) + multimatch █████████ (81,958 ops/sec ±2.13%) + + micromatch is faster by an avg. of 476% + +# star-basename-long (12339 bytes) + micromatch █████████████████████████████████████████████████ (7,963 ops/sec ±0.36%) + minimatch ███████████████████████████████ (5,072 ops/sec ±0.83%) + multimatch ███████████████████████████████ (5,028 ops/sec ±0.40%) + + micromatch is faster by an avg. of 158% + +# star-basename-short (349 bytes) + micromatch ██████████████████████████████████████████████████ (269,552 ops/sec ±0.70%) + minimatch ██████████████████████ (122,457 ops/sec ±1.39%) + multimatch ████████████████████ (110,788 ops/sec ±1.99%) + + micromatch is faster by an avg. of 231% + +# star-folder-long (19207 bytes) + micromatch █████████████████████████████████████████████████ (3,806 ops/sec ±0.38%) + minimatch ████████████████████████████ (2,204 ops/sec ±0.32%) + multimatch ██████████████████████████ (2,020 ops/sec ±1.07%) + + micromatch is faster by an avg. of 180% + +# star-folder-short (551 bytes) + micromatch ██████████████████████████████████████████████████ (249,077 ops/sec ±0.40%) + minimatch ███████████ (59,431 ops/sec ±1.67%) + multimatch ███████████ (55,569 ops/sec ±1.43%) + + micromatch is faster by an avg. of 433% +``` + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards. + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [braces](https://www.npmjs.com/package/braces): Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support… [more](https://github.com/micromatch/braces) | [homepage](https://github.com/micromatch/braces "Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.") +* [expand-brackets](https://www.npmjs.com/package/expand-brackets): Expand POSIX bracket expressions (character classes) in glob patterns. | [homepage](https://github.com/jonschlinkert/expand-brackets "Expand POSIX bracket expressions (character classes) in glob patterns.") +* [extglob](https://www.npmjs.com/package/extglob): Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob… [more](https://github.com/micromatch/extglob) | [homepage](https://github.com/micromatch/extglob "Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob patterns.") +* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") +* [nanomatch](https://www.npmjs.com/package/nanomatch): Fast, minimal glob matcher for node.js. Similar to micromatch, minimatch and multimatch, but complete Bash… [more](https://github.com/micromatch/nanomatch) | [homepage](https://github.com/micromatch/nanomatch "Fast, minimal glob matcher for node.js. Similar to micromatch, minimatch and multimatch, but complete Bash 4.3 wildcard support only (no support for exglobs, posix brackets or braces)") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 457 | [jonschlinkert](https://github.com/jonschlinkert) | +| 12 | [es128](https://github.com/es128) | +| 8 | [doowb](https://github.com/doowb) | +| 3 | [paulmillr](https://github.com/paulmillr) | +| 2 | [TrySound](https://github.com/TrySound) | +| 2 | [MartinKolarik](https://github.com/MartinKolarik) | +| 2 | [charlike-old](https://github.com/charlike-old) | +| 1 | [amilajack](https://github.com/amilajack) | +| 1 | [mrmlnc](https://github.com/mrmlnc) | +| 1 | [devongovett](https://github.com/devongovett) | +| 1 | [DianeLooney](https://github.com/DianeLooney) | +| 1 | [UltCombo](https://github.com/UltCombo) | +| 1 | [tomByrer](https://github.com/tomByrer) | +| 1 | [fidian](https://github.com/fidian) | + +### Author + +**Jon Schlinkert** + +* [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). 
+ +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on February 18, 2018._ \ No newline at end of file diff --git a/node_modules/micromatch/index.js b/node_modules/micromatch/index.js new file mode 100644 index 00000000..fe02f2cb --- /dev/null +++ b/node_modules/micromatch/index.js @@ -0,0 +1,877 @@ +'use strict'; + +/** + * Module dependencies + */ + +var util = require('util'); +var braces = require('braces'); +var toRegex = require('to-regex'); +var extend = require('extend-shallow'); + +/** + * Local dependencies + */ + +var compilers = require('./lib/compilers'); +var parsers = require('./lib/parsers'); +var cache = require('./lib/cache'); +var utils = require('./lib/utils'); +var MAX_LENGTH = 1024 * 64; + +/** + * The main function takes a list of strings and one or more + * glob patterns to use for matching. + * + * ```js + * var mm = require('micromatch'); + * mm(list, patterns[, options]); + * + * console.log(mm(['a.js', 'a.txt'], ['*.js'])); + * //=> [ 'a.js' ] + * ``` + * @param {Array} `list` A list of strings to match + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array} Returns an array of matches + * @summary false + * @api public + */ + +function micromatch(list, patterns, options) { + patterns = utils.arrayify(patterns); + list = utils.arrayify(list); + + var len = patterns.length; + if (list.length === 0 || len === 0) { + return []; + } + + if (len === 1) { + return micromatch.match(list, patterns[0], options); + } + + var omit = []; + var keep = []; + var idx = -1; + + while (++idx < len) { + var pattern = patterns[idx]; + + if (typeof pattern === 'string' && pattern.charCodeAt(0) === 33 /* ! */) { + omit.push.apply(omit, micromatch.match(list, pattern.slice(1), options)); + } else { + keep.push.apply(keep, micromatch.match(list, pattern, options)); + } + } + + var matches = utils.diff(keep, omit); + if (!options || options.nodupes !== false) { + return utils.unique(matches); + } + + return matches; +} + +/** + * Similar to the main function, but `pattern` must be a string. + * + * ```js + * var mm = require('micromatch'); + * mm.match(list, pattern[, options]); + * + * console.log(mm.match(['a.a', 'a.aa', 'a.b', 'a.c'], '*.a')); + * //=> ['a.a', 'a.aa'] + * ``` + * @param {Array} `list` Array of strings to match + * @param {String} `pattern` Glob pattern to use for matching. 
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array} Returns an array of matches + * @api public + */ + +micromatch.match = function(list, pattern, options) { + if (Array.isArray(pattern)) { + throw new TypeError('expected pattern to be a string'); + } + + var unixify = utils.unixify(options); + var isMatch = memoize('match', pattern, options, micromatch.matcher); + var matches = []; + + list = utils.arrayify(list); + var len = list.length; + var idx = -1; + + while (++idx < len) { + var ele = list[idx]; + if (ele === pattern || isMatch(ele)) { + matches.push(utils.value(ele, unixify, options)); + } + } + + // if no options were passed, uniquify results and return + if (typeof options === 'undefined') { + return utils.unique(matches); + } + + if (matches.length === 0) { + if (options.failglob === true) { + throw new Error('no matches found for "' + pattern + '"'); + } + if (options.nonull === true || options.nullglob === true) { + return [options.unescape ? utils.unescape(pattern) : pattern]; + } + } + + // if `opts.ignore` was defined, diff ignored list + if (options.ignore) { + matches = micromatch.not(matches, options.ignore, options); + } + + return options.nodupes !== false ? utils.unique(matches) : matches; +}; + +/** + * Returns true if the specified `string` matches the given glob `pattern`. + * + * ```js + * var mm = require('micromatch'); + * mm.isMatch(string, pattern[, options]); + * + * console.log(mm.isMatch('a.a', '*.a')); + * //=> true + * console.log(mm.isMatch('a.b', '*.a')); + * //=> false + * ``` + * @param {String} `string` String to match + * @param {String} `pattern` Glob pattern to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if the string matches the glob pattern. + * @api public + */ + +micromatch.isMatch = function(str, pattern, options) { + if (typeof str !== 'string') { + throw new TypeError('expected a string: "' + util.inspect(str) + '"'); + } + + if (isEmptyString(str) || isEmptyString(pattern)) { + return false; + } + + var equals = utils.equalsPattern(options); + if (equals(str)) { + return true; + } + + var isMatch = memoize('isMatch', pattern, options, micromatch.matcher); + return isMatch(str); +}; + +/** + * Returns true if some of the strings in the given `list` match any of the + * given glob `patterns`. + * + * ```js + * var mm = require('micromatch'); + * mm.some(list, patterns[, options]); + * + * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // true + * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.some = function(list, patterns, options) { + if (typeof list === 'string') { + list = [list]; + } + for (var i = 0; i < list.length; i++) { + if (micromatch(list[i], patterns, options).length === 1) { + return true; + } + } + return false; +}; + +/** + * Returns true if every string in the given `list` matches + * any of the given glob `patterns`. 
+ * + * ```js + * var mm = require('micromatch'); + * mm.every(list, patterns[, options]); + * + * console.log(mm.every('foo.js', ['foo.js'])); + * // true + * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); + * // true + * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // false + * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.every = function(list, patterns, options) { + if (typeof list === 'string') { + list = [list]; + } + for (var i = 0; i < list.length; i++) { + if (micromatch(list[i], patterns, options).length !== 1) { + return false; + } + } + return true; +}; + +/** + * Returns true if **any** of the given glob `patterns` + * match the specified `string`. + * + * ```js + * var mm = require('micromatch'); + * mm.any(string, patterns[, options]); + * + * console.log(mm.any('a.a', ['b.*', '*.a'])); + * //=> true + * console.log(mm.any('a.a', 'b.*')); + * //=> false + * ``` + * @param {String|Array} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.any = function(str, patterns, options) { + if (typeof str !== 'string') { + throw new TypeError('expected a string: "' + util.inspect(str) + '"'); + } + + if (isEmptyString(str) || isEmptyString(patterns)) { + return false; + } + + if (typeof patterns === 'string') { + patterns = [patterns]; + } + + for (var i = 0; i < patterns.length; i++) { + if (micromatch.isMatch(str, patterns[i], options)) { + return true; + } + } + return false; +}; + +/** + * Returns true if **all** of the given `patterns` match + * the specified string. + * + * ```js + * var mm = require('micromatch'); + * mm.all(string, patterns[, options]); + * + * console.log(mm.all('foo.js', ['foo.js'])); + * // true + * + * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); + * // false + * + * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); + * // true + * + * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); + * // true + * ``` + * @param {String|Array} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.all = function(str, patterns, options) { + if (typeof str !== 'string') { + throw new TypeError('expected a string: "' + util.inspect(str) + '"'); + } + if (typeof patterns === 'string') { + patterns = [patterns]; + } + for (var i = 0; i < patterns.length; i++) { + if (!micromatch.isMatch(str, patterns[i], options)) { + return false; + } + } + return true; +}; + +/** + * Returns a list of strings that _**do not match any**_ of the given `patterns`. 
+ * + * ```js + * var mm = require('micromatch'); + * mm.not(list, patterns[, options]); + * + * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); + * //=> ['b.b', 'c.c'] + * ``` + * @param {Array} `list` Array of strings to match. + * @param {String|Array} `patterns` One or more glob pattern to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array} Returns an array of strings that **do not match** the given patterns. + * @api public + */ + +micromatch.not = function(list, patterns, options) { + var opts = extend({}, options); + var ignore = opts.ignore; + delete opts.ignore; + + var unixify = utils.unixify(opts); + list = utils.arrayify(list).map(unixify); + + var matches = utils.diff(list, micromatch(list, patterns, opts)); + if (ignore) { + matches = utils.diff(matches, micromatch(list, ignore)); + } + + return opts.nodupes !== false ? utils.unique(matches) : matches; +}; + +/** + * Returns true if the given `string` contains the given pattern. Similar + * to [.isMatch](#isMatch) but the pattern can match any part of the string. + * + * ```js + * var mm = require('micromatch'); + * mm.contains(string, pattern[, options]); + * + * console.log(mm.contains('aa/bb/cc', '*b')); + * //=> true + * console.log(mm.contains('aa/bb/cc', '*d')); + * //=> false + * ``` + * @param {String} `str` The string to match. + * @param {String|Array} `patterns` Glob pattern to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if the patter matches any part of `str`. + * @api public + */ + +micromatch.contains = function(str, patterns, options) { + if (typeof str !== 'string') { + throw new TypeError('expected a string: "' + util.inspect(str) + '"'); + } + + if (typeof patterns === 'string') { + if (isEmptyString(str) || isEmptyString(patterns)) { + return false; + } + + var equals = utils.equalsPattern(patterns, options); + if (equals(str)) { + return true; + } + var contains = utils.containsPattern(patterns, options); + if (contains(str)) { + return true; + } + } + + var opts = extend({}, options, {contains: true}); + return micromatch.any(str, patterns, opts); +}; + +/** + * Returns true if the given pattern and options should enable + * the `matchBase` option. + * @return {Boolean} + * @api private + */ + +micromatch.matchBase = function(pattern, options) { + if (pattern && pattern.indexOf('/') !== -1 || !options) return false; + return options.basename === true || options.matchBase === true; +}; + +/** + * Filter the keys of the given object with the given `glob` pattern + * and `options`. Does not attempt to match nested keys. If you need this feature, + * use [glob-object][] instead. + * + * ```js + * var mm = require('micromatch'); + * mm.matchKeys(object, patterns[, options]); + * + * var obj = { aa: 'a', ab: 'b', ac: 'c' }; + * console.log(mm.matchKeys(obj, '*b')); + * //=> { ab: 'b' } + * ``` + * @param {Object} `object` The object with keys to filter. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Object} Returns an object with only keys that match the given patterns. 
+ * @api public + */ + +micromatch.matchKeys = function(obj, patterns, options) { + if (!utils.isObject(obj)) { + throw new TypeError('expected the first argument to be an object'); + } + var keys = micromatch(Object.keys(obj), patterns, options); + return utils.pick(obj, keys); +}; + +/** + * Returns a memoized matcher function from the given glob `pattern` and `options`. + * The returned function takes a string to match as its only argument and returns + * true if the string is a match. + * + * ```js + * var mm = require('micromatch'); + * mm.matcher(pattern[, options]); + * + * var isMatch = mm.matcher('*.!(*a)'); + * console.log(isMatch('a.a')); + * //=> false + * console.log(isMatch('a.b')); + * //=> true + * ``` + * @param {String} `pattern` Glob pattern + * @param {Object} `options` See available [options](#options) for changing how matches are performed. + * @return {Function} Returns a matcher function. + * @api public + */ + +micromatch.matcher = function matcher(pattern, options) { + if (Array.isArray(pattern)) { + return compose(pattern, options, matcher); + } + + // if pattern is a regex + if (pattern instanceof RegExp) { + return test(pattern); + } + + // if pattern is invalid + if (!utils.isString(pattern)) { + throw new TypeError('expected pattern to be an array, string or regex'); + } + + // if pattern is a non-glob string + if (!utils.hasSpecialChars(pattern)) { + if (options && options.nocase === true) { + pattern = pattern.toLowerCase(); + } + return utils.matchPath(pattern, options); + } + + // if pattern is a glob string + var re = micromatch.makeRe(pattern, options); + + // if `options.matchBase` or `options.basename` is defined + if (micromatch.matchBase(pattern, options)) { + return utils.matchBasename(re, options); + } + + function test(regex) { + var equals = utils.equalsPattern(options); + var unixify = utils.unixify(options); + + return function(str) { + if (equals(str)) { + return true; + } + + if (regex.test(unixify(str))) { + return true; + } + return false; + }; + } + + var fn = test(re); + Object.defineProperty(fn, 'result', { + configurable: true, + enumerable: false, + value: re.result + }); + return fn; +}; + +/** + * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match. + * + * ```js + * var mm = require('micromatch'); + * mm.capture(pattern, string[, options]); + * + * console.log(mm.capture('test/*.js', 'test/foo.js')); + * //=> ['foo'] + * console.log(mm.capture('test/*.js', 'foo/bar.css')); + * //=> null + * ``` + * @param {String} `pattern` Glob pattern to use for matching. + * @param {String} `string` String to match + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns an array of captures if the string matches the glob pattern, otherwise `null`. + * @api public + */ + +micromatch.capture = function(pattern, str, options) { + var re = micromatch.makeRe(pattern, extend({capture: true}, options)); + var unixify = utils.unixify(options); + + function match() { + return function(string) { + var match = re.exec(unixify(string)); + if (!match) { + return null; + } + + return match.slice(1); + }; + } + + var capture = memoize('capture', pattern, options, match); + return capture(str); +}; + +/** + * Create a regular expression from the given glob `pattern`. 
+ * + * ```js + * var mm = require('micromatch'); + * mm.makeRe(pattern[, options]); + * + * console.log(mm.makeRe('*.js')); + * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ + * ``` + * @param {String} `pattern` A glob pattern to convert to regex. + * @param {Object} `options` See available [options](#options) for changing how matches are performed. + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +micromatch.makeRe = function(pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('expected pattern to be a string'); + } + + if (pattern.length > MAX_LENGTH) { + throw new Error('expected pattern to be less than ' + MAX_LENGTH + ' characters'); + } + + function makeRe() { + var result = micromatch.create(pattern, options); + var ast_array = []; + var output = result.map(function(obj) { + obj.ast.state = obj.state; + ast_array.push(obj.ast); + return obj.output; + }); + + var regex = toRegex(output.join('|'), options); + Object.defineProperty(regex, 'result', { + configurable: true, + enumerable: false, + value: ast_array + }); + return regex; + } + + return memoize('makeRe', pattern, options, makeRe); +}; + +/** + * Expand the given brace `pattern`. + * + * ```js + * var mm = require('micromatch'); + * console.log(mm.braces('foo/{a,b}/bar')); + * //=> ['foo/(a|b)/bar'] + * + * console.log(mm.braces('foo/{a,b}/bar', {expand: true})); + * //=> ['foo/(a|b)/bar'] + * ``` + * @param {String} `pattern` String with brace pattern to expand. + * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. + * @return {Array} + * @api public + */ + +micromatch.braces = function(pattern, options) { + if (typeof pattern !== 'string' && !Array.isArray(pattern)) { + throw new TypeError('expected pattern to be an array or string'); + } + + function expand() { + if (options && options.nobrace === true || !/\{.*\}/.test(pattern)) { + return utils.arrayify(pattern); + } + return braces(pattern, options); + } + + return memoize('braces', pattern, options, expand); +}; + +/** + * Proxy to the [micromatch.braces](#method), for parity with + * minimatch. + */ + +micromatch.braceExpand = function(pattern, options) { + var opts = extend({}, options, {expand: true}); + return micromatch.braces(pattern, opts); +}; + +/** + * Parses the given glob `pattern` and returns an array of abstract syntax + * trees (ASTs), with the compiled `output` and optional source `map` on + * each AST. + * + * ```js + * var mm = require('micromatch'); + * mm.create(pattern[, options]); + * + * console.log(mm.create('abc/*.js')); + * // [{ options: { source: 'string', sourcemap: true }, + * // state: {}, + * // compilers: + * // { ... }, + * // output: '(\\.[\\\\\\/])?abc\\/(?!\\.)(?=.)[^\\/]*?\\.js', + * // ast: + * // { type: 'root', + * // errors: [], + * // nodes: + * // [ ... ], + * // dot: false, + * // input: 'abc/*.js' }, + * // parsingErrors: [], + * // map: + * // { version: 3, + * // sources: [ 'string' ], + * // names: [], + * // mappings: 'AAAA,GAAG,EAAC,kBAAC,EAAC,EAAE', + * // sourcesContent: [ 'abc/*.js' ] }, + * // position: { line: 1, column: 28 }, + * // content: {}, + * // files: {}, + * // idx: 6 }] + * ``` + * @param {String} `pattern` Glob pattern to parse and compile. + * @param {Object} `options` Any [options](#options) to change how parsing and compiling is performed. + * @return {Object} Returns an object with the parsed AST, compiled string and optional source map. 
+ * @api public + */ + +micromatch.create = function(pattern, options) { + return memoize('create', pattern, options, function() { + function create(str, opts) { + return micromatch.compile(micromatch.parse(str, opts), opts); + } + + pattern = micromatch.braces(pattern, options); + var len = pattern.length; + var idx = -1; + var res = []; + + while (++idx < len) { + res.push(create(pattern[idx], options)); + } + return res; + }); +}; + +/** + * Parse the given `str` with the given `options`. + * + * ```js + * var mm = require('micromatch'); + * mm.parse(pattern[, options]); + * + * var ast = mm.parse('a/{b,c}/d'); + * console.log(ast); + * // { type: 'root', + * // errors: [], + * // input: 'a/{b,c}/d', + * // nodes: + * // [ { type: 'bos', val: '' }, + * // { type: 'text', val: 'a/' }, + * // { type: 'brace', + * // nodes: + * // [ { type: 'brace.open', val: '{' }, + * // { type: 'text', val: 'b,c' }, + * // { type: 'brace.close', val: '}' } ] }, + * // { type: 'text', val: '/d' }, + * // { type: 'eos', val: '' } ] } + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {Object} Returns an AST + * @api public + */ + +micromatch.parse = function(pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('expected a string'); + } + + function parse() { + var snapdragon = utils.instantiate(null, options); + parsers(snapdragon, options); + + var ast = snapdragon.parse(pattern, options); + utils.define(ast, 'snapdragon', snapdragon); + ast.input = pattern; + return ast; + } + + return memoize('parse', pattern, options, parse); +}; + +/** + * Compile the given `ast` or string with the given `options`. + * + * ```js + * var mm = require('micromatch'); + * mm.compile(ast[, options]); + * + * var ast = mm.parse('a/{b,c}/d'); + * console.log(mm.compile(ast)); + * // { options: { source: 'string' }, + * // state: {}, + * // compilers: + * // { eos: [Function], + * // noop: [Function], + * // bos: [Function], + * // brace: [Function], + * // 'brace.open': [Function], + * // text: [Function], + * // 'brace.close': [Function] }, + * // output: [ 'a/(b|c)/d' ], + * // ast: + * // { ... }, + * // parsingErrors: [] } + * ``` + * @param {Object|String} `ast` + * @param {Object} `options` + * @return {Object} Returns an object that has an `output` property with the compiled string. + * @api public + */ + +micromatch.compile = function(ast, options) { + if (typeof ast === 'string') { + ast = micromatch.parse(ast, options); + } + + return memoize('compile', ast.input, options, function() { + var snapdragon = utils.instantiate(ast, options); + compilers(snapdragon, options); + return snapdragon.compile(ast, options); + }); +}; + +/** + * Clear the regex cache. + * + * ```js + * mm.clearCache(); + * ``` + * @api public + */ + +micromatch.clearCache = function() { + micromatch.cache.caches = {}; +}; + +/** + * Returns true if the given value is effectively an empty string + */ + +function isEmptyString(val) { + return String(val) === '' || String(val) === './'; +} + +/** + * Compose a matcher function with the given patterns. + * This allows matcher functions to be compiled once and + * called multiple times. 
+ */ + +function compose(patterns, options, matcher) { + var matchers; + + return memoize('compose', String(patterns), options, function() { + return function(file) { + // delay composition until it's invoked the first time, + // after that it won't be called again + if (!matchers) { + matchers = []; + for (var i = 0; i < patterns.length; i++) { + matchers.push(matcher(patterns[i], options)); + } + } + + var len = matchers.length; + while (len--) { + if (matchers[len](file) === true) { + return true; + } + } + return false; + }; + }); +} + +/** + * Memoize a generated regex or function. A unique key is generated + * from the `type` (usually method name), the `pattern`, and + * user-defined options. + */ + +function memoize(type, pattern, options, fn) { + var key = utils.createKey(type + '=' + pattern, options); + + if (options && options.cache === false) { + return fn(pattern, options); + } + + if (cache.has(type, key)) { + return cache.get(type, key); + } + + var val = fn(pattern, options); + cache.set(type, key, val); + return val; +} + +/** + * Expose compiler, parser and cache on `micromatch` + */ + +micromatch.compilers = compilers; +micromatch.parsers = parsers; +micromatch.caches = cache.caches; + +/** + * Expose `micromatch` + * @type {Function} + */ + +module.exports = micromatch; diff --git a/node_modules/micromatch/lib/cache.js b/node_modules/micromatch/lib/cache.js new file mode 100644 index 00000000..fffc4c17 --- /dev/null +++ b/node_modules/micromatch/lib/cache.js @@ -0,0 +1 @@ +module.exports = new (require('fragment-cache'))(); diff --git a/node_modules/micromatch/lib/compilers.js b/node_modules/micromatch/lib/compilers.js new file mode 100644 index 00000000..85cda4f8 --- /dev/null +++ b/node_modules/micromatch/lib/compilers.js @@ -0,0 +1,77 @@ +'use strict'; + +var nanomatch = require('nanomatch'); +var extglob = require('extglob'); + +module.exports = function(snapdragon) { + var compilers = snapdragon.compiler.compilers; + var opts = snapdragon.options; + + // register nanomatch compilers + snapdragon.use(nanomatch.compilers); + + // get references to some specific nanomatch compilers before they + // are overridden by the extglob and/or custom compilers + var escape = compilers.escape; + var qmark = compilers.qmark; + var slash = compilers.slash; + var star = compilers.star; + var text = compilers.text; + var plus = compilers.plus; + var dot = compilers.dot; + + // register extglob compilers or escape exglobs if disabled + if (opts.extglob === false || opts.noext === true) { + snapdragon.compiler.use(escapeExtglobs); + } else { + snapdragon.use(extglob.compilers); + } + + snapdragon.use(function() { + this.options.star = this.options.star || function(/*node*/) { + return '[^\\\\/]*?'; + }; + }); + + // custom micromatch compilers + snapdragon.compiler + + // reset referenced compiler + .set('dot', dot) + .set('escape', escape) + .set('plus', plus) + .set('slash', slash) + .set('qmark', qmark) + .set('star', star) + .set('text', text); +}; + +function escapeExtglobs(compiler) { + compiler.set('paren', function(node) { + var val = ''; + visit(node, function(tok) { + if (tok.val) val += (/^\W/.test(tok.val) ? '\\' : '') + tok.val; + }); + return this.emit(val, node); + }); + + /** + * Visit `node` with the given `fn` + */ + + function visit(node, fn) { + return node.nodes ? mapVisit(node.nodes, fn) : fn(node); + } + + /** + * Map visit over array of `nodes`. 
+ */ + + function mapVisit(nodes, fn) { + var len = nodes.length; + var idx = -1; + while (++idx < len) { + visit(nodes[idx], fn); + } + } +} diff --git a/node_modules/micromatch/lib/parsers.js b/node_modules/micromatch/lib/parsers.js new file mode 100644 index 00000000..f80498ce --- /dev/null +++ b/node_modules/micromatch/lib/parsers.js @@ -0,0 +1,83 @@ +'use strict'; + +var extglob = require('extglob'); +var nanomatch = require('nanomatch'); +var regexNot = require('regex-not'); +var toRegex = require('to-regex'); +var not; + +/** + * Characters to use in negation regex (we want to "not" match + * characters that are matched by other parsers) + */ + +var TEXT = '([!@*?+]?\\(|\\)|\\[:?(?=.*?:?\\])|:?\\]|[*+?!^$.\\\\/])+'; +var createNotRegex = function(opts) { + return not || (not = textRegex(TEXT)); +}; + +/** + * Parsers + */ + +module.exports = function(snapdragon) { + var parsers = snapdragon.parser.parsers; + + // register nanomatch parsers + snapdragon.use(nanomatch.parsers); + + // get references to some specific nanomatch parsers before they + // are overridden by the extglob and/or parsers + var escape = parsers.escape; + var slash = parsers.slash; + var qmark = parsers.qmark; + var plus = parsers.plus; + var star = parsers.star; + var dot = parsers.dot; + + // register extglob parsers + snapdragon.use(extglob.parsers); + + // custom micromatch parsers + snapdragon.parser + .use(function() { + // override "notRegex" created in nanomatch parser + this.notRegex = /^\!+(?!\()/; + }) + // reset the referenced parsers + .capture('escape', escape) + .capture('slash', slash) + .capture('qmark', qmark) + .capture('star', star) + .capture('plus', plus) + .capture('dot', dot) + + /** + * Override `text` parser + */ + + .capture('text', function() { + if (this.isInside('bracket')) return; + var pos = this.position(); + var m = this.match(createNotRegex(this.options)); + if (!m || !m[0]) return; + + // escape regex boundary characters and simple brackets + var val = m[0].replace(/([[\]^$])/g, '\\$1'); + + return pos({ + type: 'text', + val: val + }); + }); +}; + +/** + * Create text regex + */ + +function textRegex(pattern) { + var notStr = regexNot.create(pattern, {contains: true, strictClose: false}); + var prefix = '(?:[\\^]|\\\\|'; + return toRegex(prefix + notStr + ')', {strictClose: false}); +} diff --git a/node_modules/micromatch/lib/utils.js b/node_modules/micromatch/lib/utils.js new file mode 100644 index 00000000..f0ba9177 --- /dev/null +++ b/node_modules/micromatch/lib/utils.js @@ -0,0 +1,309 @@ +'use strict'; + +var utils = module.exports; +var path = require('path'); + +/** + * Module dependencies + */ + +var Snapdragon = require('snapdragon'); +utils.define = require('define-property'); +utils.diff = require('arr-diff'); +utils.extend = require('extend-shallow'); +utils.pick = require('object.pick'); +utils.typeOf = require('kind-of'); +utils.unique = require('array-unique'); + +/** + * Returns true if the platform is windows, or `path.sep` is `\\`. + * This is defined as a function to allow `path.sep` to be set in unit tests, + * or by the user, if there is a reason to do so. 
+ * @return {Boolean} + */ + +utils.isWindows = function() { + return path.sep === '\\' || process.platform === 'win32'; +}; + +/** + * Get the `Snapdragon` instance to use + */ + +utils.instantiate = function(ast, options) { + var snapdragon; + // if an instance was created by `.parse`, use that instance + if (utils.typeOf(ast) === 'object' && ast.snapdragon) { + snapdragon = ast.snapdragon; + // if the user supplies an instance on options, use that instance + } else if (utils.typeOf(options) === 'object' && options.snapdragon) { + snapdragon = options.snapdragon; + // create a new instance + } else { + snapdragon = new Snapdragon(options); + } + + utils.define(snapdragon, 'parse', function(str, options) { + var parsed = Snapdragon.prototype.parse.apply(this, arguments); + parsed.input = str; + + // escape unmatched brace/bracket/parens + var last = this.parser.stack.pop(); + if (last && this.options.strictErrors !== true) { + var open = last.nodes[0]; + var inner = last.nodes[1]; + if (last.type === 'bracket') { + if (inner.val.charAt(0) === '[') { + inner.val = '\\' + inner.val; + } + + } else { + open.val = '\\' + open.val; + var sibling = open.parent.nodes[1]; + if (sibling.type === 'star') { + sibling.loose = true; + } + } + } + + // add non-enumerable parser reference + utils.define(parsed, 'parser', this.parser); + return parsed; + }); + + return snapdragon; +}; + +/** + * Create the key to use for memoization. The key is generated + * by iterating over the options and concatenating key-value pairs + * to the pattern string. + */ + +utils.createKey = function(pattern, options) { + if (utils.typeOf(options) !== 'object') { + return pattern; + } + var val = pattern; + var keys = Object.keys(options); + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + val += ';' + key + '=' + String(options[key]); + } + return val; +}; + +/** + * Cast `val` to an array + * @return {Array} + */ + +utils.arrayify = function(val) { + if (typeof val === 'string') return [val]; + return val ? (Array.isArray(val) ? val : [val]) : []; +}; + +/** + * Return true if `val` is a non-empty string + */ + +utils.isString = function(val) { + return typeof val === 'string'; +}; + +/** + * Return true if `val` is a non-empty string + */ + +utils.isObject = function(val) { + return utils.typeOf(val) === 'object'; +}; + +/** + * Returns true if the given `str` has special characters + */ + +utils.hasSpecialChars = function(str) { + return /(?:(?:(^|\/)[!.])|[*?+()|\[\]{}]|[+@]\()/.test(str); +}; + +/** + * Escape regex characters in the given string + */ + +utils.escapeRegex = function(str) { + return str.replace(/[-[\]{}()^$|*+?.\\\/\s]/g, '\\$&'); +}; + +/** + * Normalize slashes in the given filepath. + * + * @param {String} `filepath` + * @return {String} + */ + +utils.toPosixPath = function(str) { + return str.replace(/\\+/g, '/'); +}; + +/** + * Strip backslashes before special characters in a string. 
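+ *
+ * Illustrative example (inferred from the implementation below, not part of
+ * the original docs): `unescape('foo\\*.js')` returns `'foo*.js'`.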
+ * + * @param {String} `str` + * @return {String} + */ + +utils.unescape = function(str) { + return utils.toPosixPath(str.replace(/\\(?=[*+?!.])/g, '')); +}; + +/** + * Strip the prefix from a filepath + * @param {String} `fp` + * @return {String} + */ + +utils.stripPrefix = function(str) { + if (str.charAt(0) !== '.') { + return str; + } + var ch = str.charAt(1); + if (utils.isSlash(ch)) { + return str.slice(2); + } + return str; +}; + +/** + * Returns true if the given str is an escaped or + * unescaped path character + */ + +utils.isSlash = function(str) { + return str === '/' || str === '\\/' || str === '\\' || str === '\\\\'; +}; + +/** + * Returns a function that returns true if the given + * pattern matches or contains a `filepath` + * + * @param {String} `pattern` + * @return {Function} + */ + +utils.matchPath = function(pattern, options) { + return (options && options.contains) + ? utils.containsPattern(pattern, options) + : utils.equalsPattern(pattern, options); +}; + +/** + * Returns true if the given (original) filepath or unixified path are equal + * to the given pattern. + */ + +utils._equals = function(filepath, unixPath, pattern) { + return pattern === filepath || pattern === unixPath; +}; + +/** + * Returns true if the given (original) filepath or unixified path contain + * the given pattern. + */ + +utils._contains = function(filepath, unixPath, pattern) { + return filepath.indexOf(pattern) !== -1 || unixPath.indexOf(pattern) !== -1; +}; + +/** + * Returns a function that returns true if the given + * pattern is the same as a given `filepath` + * + * @param {String} `pattern` + * @return {Function} + */ + +utils.equalsPattern = function(pattern, options) { + var unixify = utils.unixify(options); + options = options || {}; + + return function fn(filepath) { + var equal = utils._equals(filepath, unixify(filepath), pattern); + if (equal === true || options.nocase !== true) { + return equal; + } + var lower = filepath.toLowerCase(); + return utils._equals(lower, unixify(lower), pattern); + }; +}; + +/** + * Returns a function that returns true if the given + * pattern contains a `filepath` + * + * @param {String} `pattern` + * @return {Function} + */ + +utils.containsPattern = function(pattern, options) { + var unixify = utils.unixify(options); + options = options || {}; + + return function(filepath) { + var contains = utils._contains(filepath, unixify(filepath), pattern); + if (contains === true || options.nocase !== true) { + return contains; + } + var lower = filepath.toLowerCase(); + return utils._contains(lower, unixify(lower), pattern); + }; +}; + +/** + * Returns a function that returns true if the given + * regex matches the `filename` of a file path. + * + * @param {RegExp} `re` Matching regex + * @return {Function} + */ + +utils.matchBasename = function(re) { + return function(filepath) { + return re.test(path.basename(filepath)); + }; +}; + +/** + * Determines the filepath to return based on the provided options. + * @return {any} + */ + +utils.value = function(str, unixify, options) { + if (options && options.unixify === false) { + return str; + } + return unixify(str); +}; + +/** + * Returns a function that normalizes slashes in a string to forward + * slashes, strips `./` from beginning of paths, and optionally unescapes + * special characters. 
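+ *
+ * Illustrative example (inferred from the implementation below): on Windows,
+ * or with `options.unixify` set to `true`, the returned function converts the
+ * path `.\foo\bar.js` to `foo/bar.js`.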
+ * @return {Function} + */ + +utils.unixify = function(options) { + options = options || {}; + return function(filepath) { + if (utils.isWindows() || options.unixify === true) { + filepath = utils.toPosixPath(filepath); + } + if (options.stripPrefix !== false) { + filepath = utils.stripPrefix(filepath); + } + if (options.unescape === true) { + filepath = utils.unescape(filepath); + } + return filepath; + }; +}; diff --git a/node_modules/micromatch/node_modules/define-property/CHANGELOG.md b/node_modules/micromatch/node_modules/define-property/CHANGELOG.md new file mode 100644 index 00000000..901c8aae --- /dev/null +++ b/node_modules/micromatch/node_modules/define-property/CHANGELOG.md @@ -0,0 +1,82 @@ +# Release history + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+ Guiding Principles + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each versions is displayed. +- Mention whether you follow Semantic Versioning. + +
+ +
+ Types of changes + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/)_): + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities. + +
+ +## [2.0.0] - 2017-04-20 + +### Changed + +- Now supports data descriptors in addition to accessor descriptors. +- Now uses [Reflect.defineProperty][reflect] when available, otherwise falls back to [Object.defineProperty][object]. + +## [1.0.0] - 2017-04-20 + +- stable release + +## [0.2.5] - 2015-08-31 + +- use is-descriptor + +## [0.2.3] - 2015-08-29 + +- check keys length + +## [0.2.2] - 2015-08-27 + +- ensure val is an object + +## [0.2.1] - 2015-08-27 + +- support functions + +## [0.2.0] - 2015-08-27 + +- support get/set +- update docs + +## [0.1.0] - 2015-08-12 + +- first commit + +[2.0.0]: https://github.com/jonschlinkert/define-property/compare/1.0.0...2.0.0 +[1.0.0]: https://github.com/jonschlinkert/define-property/compare/0.2.5...1.0.0 +[0.2.5]: https://github.com/jonschlinkert/define-property/compare/0.2.3...0.2.5 +[0.2.3]: https://github.com/jonschlinkert/define-property/compare/0.2.2...0.2.3 +[0.2.2]: https://github.com/jonschlinkert/define-property/compare/0.2.1...0.2.2 +[0.2.1]: https://github.com/jonschlinkert/define-property/compare/0.2.0...0.2.1 +[0.2.0]: https://github.com/jonschlinkert/define-property/compare/0.1.3...0.2.0 + +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog + +[object]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/defineProperty +[reflect]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Reflect/defineProperty diff --git a/node_modules/micromatch/node_modules/define-property/LICENSE b/node_modules/micromatch/node_modules/define-property/LICENSE new file mode 100644 index 00000000..f8de0630 --- /dev/null +++ b/node_modules/micromatch/node_modules/define-property/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2018, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
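The 2.0.0 entry above adds support for plain data descriptors alongside accessor descriptors, with `Reflect.defineProperty` used when available. A minimal sketch of the data-descriptor path, based on the `index.js` included later in this patch (the object and property names are illustrative):

```js
var define = require('define-property');

var obj = {};

// a plain value is wrapped as a non-enumerable, writable data property
define(obj, 'hidden', 42);
console.log(Object.keys(obj)); //=> []

// a full data descriptor is detected by is-descriptor and passed through as-is
define(obj, 'visible', {
  value: 'shown',
  enumerable: true,
  writable: false,
  configurable: true
});
console.log(Object.keys(obj)); //=> ['visible']
```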
diff --git a/node_modules/micromatch/node_modules/define-property/README.md b/node_modules/micromatch/node_modules/define-property/README.md new file mode 100644 index 00000000..f1ee8f92 --- /dev/null +++ b/node_modules/micromatch/node_modules/define-property/README.md @@ -0,0 +1,117 @@ +# define-property [![NPM version](https://img.shields.io/npm/v/define-property.svg?style=flat)](https://www.npmjs.com/package/define-property) [![NPM monthly downloads](https://img.shields.io/npm/dm/define-property.svg?style=flat)](https://npmjs.org/package/define-property) [![NPM total downloads](https://img.shields.io/npm/dt/define-property.svg?style=flat)](https://npmjs.org/package/define-property) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/define-property.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/define-property) + +> Define a non-enumerable property on an object. Uses Reflect.defineProperty when available, otherwise Object.defineProperty. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save define-property +``` + +## Release history + +See [the CHANGELOG](changelog.md) for updates. + +## Usage + +**Params** + +* `object`: The object on which to define the property. +* `key`: The name of the property to be defined or modified. +* `value`: The value or descriptor of the property being defined or modified. + +```js +var define = require('define-property'); +var obj = {}; +define(obj, 'foo', function(val) { + return val.toUpperCase(); +}); + +// by default, defined properties are non-enumberable +console.log(obj); +//=> {} + +console.log(obj.foo('bar')); +//=> 'BAR' +``` + +**defining setters/getters** + +Pass the same properties you would if using [Object.defineProperty](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/defineProperty) or [Reflect.defineProperty](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Reflect/defineProperty). + +```js +define(obj, 'foo', { + set: function() {}, + get: function() {} +}); +``` + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [assign-deep](https://www.npmjs.com/package/assign-deep): Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target… [more](https://github.com/jonschlinkert/assign-deep) | [homepage](https://github.com/jonschlinkert/assign-deep "Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target (first) object.") +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow "Extend an object with the properties of additional objects. node.js/javascript util.") +* [merge-deep](https://www.npmjs.com/package/merge-deep): Recursively merge values in a javascript object. | [homepage](https://github.com/jonschlinkert/merge-deep "Recursively merge values in a javascript object.") +* [mixin-deep](https://www.npmjs.com/package/mixin-deep): Deeply mix the properties of objects into the first object. Like merge-deep, but doesn't clone. | [homepage](https://github.com/jonschlinkert/mixin-deep "Deeply mix the properties of objects into the first object. Like merge-deep, but doesn't clone.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 28 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [doowb](https://github.com/doowb) | + +### Author + +**Jon Schlinkert** + +* Connect with me on [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* Follow me on [github/jonschlinkert](https://github.com/jonschlinkert) +* Follow me on [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on January 25, 2018._ \ No newline at end of file diff --git a/node_modules/micromatch/node_modules/define-property/index.js b/node_modules/micromatch/node_modules/define-property/index.js new file mode 100644 index 00000000..0efa0a9e --- /dev/null +++ b/node_modules/micromatch/node_modules/define-property/index.js @@ -0,0 +1,38 @@ +/*! + * define-property + * + * Copyright (c) 2015-2018, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +var isobject = require('isobject'); +var isDescriptor = require('is-descriptor'); +var define = (typeof Reflect !== 'undefined' && Reflect.defineProperty) + ? 
Reflect.defineProperty + : Object.defineProperty; + +module.exports = function defineProperty(obj, key, val) { + if (!isobject(obj) && typeof obj !== 'function' && !Array.isArray(obj)) { + throw new TypeError('expected an object, function, or array'); + } + + if (typeof key !== 'string') { + throw new TypeError('expected "key" to be a string'); + } + + if (isDescriptor(val)) { + define(obj, key, val); + return obj; + } + + define(obj, key, { + configurable: true, + enumerable: false, + writable: true, + value: val + }); + + return obj; +}; diff --git a/node_modules/micromatch/node_modules/define-property/package.json b/node_modules/micromatch/node_modules/define-property/package.json new file mode 100644 index 00000000..f8fd21cb --- /dev/null +++ b/node_modules/micromatch/node_modules/define-property/package.json @@ -0,0 +1,67 @@ +{ + "name": "define-property", + "description": "Define a non-enumerable property on an object. Uses Reflect.defineProperty when available, otherwise Object.defineProperty.", + "version": "2.0.2", + "homepage": "https://github.com/jonschlinkert/define-property", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "jonschlinkert/define-property", + "bugs": { + "url": "https://github.com/jonschlinkert/define-property/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-descriptor": "^1.0.2", + "isobject": "^3.0.1" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "define", + "define-property", + "enumerable", + "key", + "non", + "non-enumerable", + "object", + "prop", + "property", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "assign-deep", + "extend-shallow", + "merge-deep", + "mixin-deep" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/micromatch/node_modules/extend-shallow/LICENSE b/node_modules/micromatch/node_modules/extend-shallow/LICENSE new file mode 100644 index 00000000..99c93691 --- /dev/null +++ b/node_modules/micromatch/node_modules/extend-shallow/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, 2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/micromatch/node_modules/extend-shallow/README.md b/node_modules/micromatch/node_modules/extend-shallow/README.md new file mode 100644 index 00000000..dee226f4 --- /dev/null +++ b/node_modules/micromatch/node_modules/extend-shallow/README.md @@ -0,0 +1,97 @@ +# extend-shallow [![NPM version](https://img.shields.io/npm/v/extend-shallow.svg?style=flat)](https://www.npmjs.com/package/extend-shallow) [![NPM monthly downloads](https://img.shields.io/npm/dm/extend-shallow.svg?style=flat)](https://npmjs.org/package/extend-shallow) [![NPM total downloads](https://img.shields.io/npm/dt/extend-shallow.svg?style=flat)](https://npmjs.org/package/extend-shallow) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/extend-shallow.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/extend-shallow) + +> Extend an object with the properties of additional objects. node.js/javascript util. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save extend-shallow +``` + +## Usage + +```js +var extend = require('extend-shallow'); + +extend({a: 'b'}, {c: 'd'}) +//=> {a: 'b', c: 'd'} +``` + +Pass an empty object to shallow clone: + +```js +var obj = {}; +extend(obj, {a: 'b'}, {c: 'd'}) +//=> {a: 'b', c: 'd'} +``` + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow "Extend an object with the properties of additional objects. node.js/javascript util.") +* [for-in](https://www.npmjs.com/package/for-in): Iterate over the own and inherited enumerable properties of an object, and return an object… [more](https://github.com/jonschlinkert/for-in) | [homepage](https://github.com/jonschlinkert/for-in "Iterate over the own and inherited enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js") +* [for-own](https://www.npmjs.com/package/for-own): Iterate over the own enumerable properties of an object, and return an object with properties… [more](https://github.com/jonschlinkert/for-own) | [homepage](https://github.com/jonschlinkert/for-own "Iterate over the own enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 33 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [pdehaan](https://github.com/pdehaan) | + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). 
+ +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on November 19, 2017._ \ No newline at end of file diff --git a/node_modules/micromatch/node_modules/extend-shallow/index.js b/node_modules/micromatch/node_modules/extend-shallow/index.js new file mode 100644 index 00000000..c9582f8f --- /dev/null +++ b/node_modules/micromatch/node_modules/extend-shallow/index.js @@ -0,0 +1,60 @@ +'use strict'; + +var isExtendable = require('is-extendable'); +var assignSymbols = require('assign-symbols'); + +module.exports = Object.assign || function(obj/*, objects*/) { + if (obj === null || typeof obj === 'undefined') { + throw new TypeError('Cannot convert undefined or null to object'); + } + if (!isObject(obj)) { + obj = {}; + } + for (var i = 1; i < arguments.length; i++) { + var val = arguments[i]; + if (isString(val)) { + val = toObject(val); + } + if (isObject(val)) { + assign(obj, val); + assignSymbols(obj, val); + } + } + return obj; +}; + +function assign(a, b) { + for (var key in b) { + if (hasOwn(b, key)) { + a[key] = b[key]; + } + } +} + +function isString(val) { + return (val && typeof val === 'string'); +} + +function toObject(str) { + var obj = {}; + for (var i in str) { + obj[i] = str[i]; + } + return obj; +} + +function isObject(val) { + return (val && typeof val === 'object') || isExtendable(val); +} + +/** + * Returns true if the given `key` is an own property of `obj`. + */ + +function hasOwn(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +function isEnum(obj, key) { + return Object.prototype.propertyIsEnumerable.call(obj, key); +} diff --git a/node_modules/micromatch/node_modules/extend-shallow/package.json b/node_modules/micromatch/node_modules/extend-shallow/package.json new file mode 100644 index 00000000..e5e91053 --- /dev/null +++ b/node_modules/micromatch/node_modules/extend-shallow/package.json @@ -0,0 +1,83 @@ +{ + "name": "extend-shallow", + "description": "Extend an object with the properties of additional objects. 
node.js/javascript util.", + "version": "3.0.2", + "homepage": "https://github.com/jonschlinkert/extend-shallow", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Peter deHaan (http://about.me/peterdehaan)" + ], + "repository": "jonschlinkert/extend-shallow", + "bugs": { + "url": "https://github.com/jonschlinkert/extend-shallow/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "devDependencies": { + "array-slice": "^1.0.0", + "benchmarked": "^2.0.0", + "for-own": "^1.0.0", + "gulp-format-md": "^1.0.0", + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.1", + "minimist": "^1.2.0", + "mocha": "^3.5.3", + "object-assign": "^4.1.1" + }, + "keywords": [ + "assign", + "clone", + "extend", + "merge", + "obj", + "object", + "object-assign", + "object.assign", + "prop", + "properties", + "property", + "props", + "shallow", + "util", + "utility", + "utils", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "related": { + "list": [ + "extend-shallow", + "for-in", + "for-own", + "is-plain-object", + "isobject", + "kind-of" + ] + }, + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/micromatch/node_modules/is-extendable/LICENSE b/node_modules/micromatch/node_modules/is-extendable/LICENSE new file mode 100644 index 00000000..c0d7f136 --- /dev/null +++ b/node_modules/micromatch/node_modules/is-extendable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/micromatch/node_modules/is-extendable/README.md b/node_modules/micromatch/node_modules/is-extendable/README.md new file mode 100644 index 00000000..875b56a7 --- /dev/null +++ b/node_modules/micromatch/node_modules/is-extendable/README.md @@ -0,0 +1,88 @@ +# is-extendable [![NPM version](https://img.shields.io/npm/v/is-extendable.svg?style=flat)](https://www.npmjs.com/package/is-extendable) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![NPM total downloads](https://img.shields.io/npm/dt/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-extendable.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-extendable) + +> Returns true if a value is a plain object, array or function. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-extendable +``` + +## Usage + +```js +var isExtendable = require('is-extendable'); +``` + +Returns true if the value is any of the following: + +* array +* plain object +* function + +## Notes + +All objects in JavaScript can have keys, but it's a pain to check for this, since we ether need to verify that the value is not `null` or `undefined` and: + +* the value is not a primitive, or +* that the object is a plain object, function or array + +Also note that an `extendable` object is not the same as an [extensible object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/isExtensible), which is one that (in es6) is not sealed, frozen, or marked as non-extensible using `preventExtensions`. + +## Release history + +### v1.0.0 - 2017/07/20 + +**Breaking changes** + +* No longer considers date, regex or error objects to be extendable + +## About + +### Related projects + +* [assign-deep](https://www.npmjs.com/package/assign-deep): Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target… [more](https://github.com/jonschlinkert/assign-deep) | [homepage](https://github.com/jonschlinkert/assign-deep "Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target (first) object.") +* [is-equal-shallow](https://www.npmjs.com/package/is-equal-shallow): Does a shallow comparison of two objects, returning false if the keys or values differ. | [homepage](https://github.com/jonschlinkert/is-equal-shallow "Does a shallow comparison of two objects, returning false if the keys or values differ.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). 
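Tying back to the Notes section above, a short sketch of the values this module reports as extendable, inferred from the `index.js` included later in this patch (the sample values are illustrative):

```js
var isExtendable = require('is-extendable');

console.log(isExtendable({}));            //=> true  (plain object)
console.log(isExtendable([]));            //=> true  (array)
console.log(isExtendable(function() {})); //=> true  (function)

console.log(isExtendable('foo'));         //=> false (primitive)
console.log(isExtendable(null));          //=> false
console.log(isExtendable(new Date()));    //=> false (not a plain object, per the v1.0.0 note)
```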
+ +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on July 20, 2017._ \ No newline at end of file diff --git a/node_modules/micromatch/node_modules/is-extendable/index.d.ts b/node_modules/micromatch/node_modules/is-extendable/index.d.ts new file mode 100644 index 00000000..b96d5075 --- /dev/null +++ b/node_modules/micromatch/node_modules/is-extendable/index.d.ts @@ -0,0 +1,5 @@ +export = isExtendable; + +declare function isExtendable(val: any): boolean; + +declare namespace isExtendable {} diff --git a/node_modules/micromatch/node_modules/is-extendable/index.js b/node_modules/micromatch/node_modules/is-extendable/index.js new file mode 100644 index 00000000..a8b26ad0 --- /dev/null +++ b/node_modules/micromatch/node_modules/is-extendable/index.js @@ -0,0 +1,14 @@ +/*! + * is-extendable + * + * Copyright (c) 2015-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +var isPlainObject = require('is-plain-object'); + +module.exports = function isExtendable(val) { + return isPlainObject(val) || typeof val === 'function' || Array.isArray(val); +}; diff --git a/node_modules/micromatch/node_modules/is-extendable/package.json b/node_modules/micromatch/node_modules/is-extendable/package.json new file mode 100644 index 00000000..2aaab65a --- /dev/null +++ b/node_modules/micromatch/node_modules/is-extendable/package.json @@ -0,0 +1,67 @@ +{ + "name": "is-extendable", + "description": "Returns true if a value is a plain object, array or function.", + "version": "1.0.1", + "homepage": "https://github.com/jonschlinkert/is-extendable", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extendable", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extendable/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "index.d.ts" + ], + "main": "index.js", + "types": "index.d.ts", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.4.2" + }, + "keywords": [ + "array", + "assign", + "check", + "date", + "extend", + "extendable", + "extensible", + "function", + "is", + "object", + "regex", + "test" + ], + "verb": { + "related": { + "list": [ + "assign-deep", + "is-equal-shallow", + "is-plain-object", + "isobject", + "kind-of" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/micromatch/node_modules/kind-of/CHANGELOG.md b/node_modules/micromatch/node_modules/kind-of/CHANGELOG.md new file mode 100644 index 00000000..fb30b06d --- /dev/null +++ b/node_modules/micromatch/node_modules/kind-of/CHANGELOG.md @@ -0,0 +1,157 @@ +# Release history + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+**Guiding Principles** + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each version is displayed. +- Mention whether you follow Semantic Versioning. + +**Types of changes** + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/))_: + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities.
+ +## [6.0.0] - 2017-10-13 + +- refactor code to be more performant +- refactor benchmarks + +## [5.1.0] - 2017-10-13 + +**Added** + +- Merge pull request #15 from aretecode/patch-1 +- adds support and tests for string & array iterators + +**Changed** + +- updates benchmarks + +## [5.0.2] - 2017-08-02 + +- Merge pull request #14 from struct78/master +- Added `undefined` check + +## [5.0.0] - 2017-06-21 + +- Merge pull request #12 from aretecode/iterator +- Set Iterator + Map Iterator +- streamline `isbuffer`, minor edits + +## [4.0.0] - 2017-05-19 + +- Merge pull request #8 from tunnckoCore/master +- update deps + +## [3.2.2] - 2017-05-16 + +- fix version + +## [3.2.1] - 2017-05-16 + +- add browserify + +## [3.2.0] - 2017-04-25 + +- Merge pull request #10 from ksheedlo/unrequire-buffer +- add `promise` support and tests +- Remove unnecessary `Buffer` check + +## [3.1.0] - 2016-12-07 + +- Merge pull request #7 from laggingreflex/err +- add support for `error` and tests +- run update + +## [3.0.4] - 2016-07-29 + +- move tests +- run update + +## [3.0.3] - 2016-05-03 + +- fix prepublish script +- remove unused dep + +## [3.0.0] - 2015-11-17 + +- add typed array support +- Merge pull request #5 from miguelmota/typed-arrays +- adds new tests + +## [2.0.1] - 2015-08-21 + +- use `is-buffer` module + +## [2.0.0] - 2015-05-31 + +- Create fallback for `Array.isArray` if used as a browser package +- Merge pull request #2 from dtothefp/patch-1 +- Merge pull request #3 from pdehaan/patch-1 +- Merge branch 'master' of https://github.com/chorks/kind-of into chorks-master +- optimizations, mostly date and regex + +## [1.1.0] - 2015-02-09 + +- adds `buffer` support +- adds tests for `buffer` + +## [1.0.0] - 2015-01-19 + +- update benchmarks +- optimizations based on benchmarks + +## [0.1.2] - 2014-10-26 + +- return `typeof` value if it's not an object. 
very slight speed improvement +- use `.slice` +- adds benchmarks + +## [0.1.0] - 2014-9-26 + +- first commit + +[6.0.0]: https://github.com/jonschlinkert/kind-of/compare/5.1.0...6.0.0 +[5.1.0]: https://github.com/jonschlinkert/kind-of/compare/5.0.2...5.1.0 +[5.0.2]: https://github.com/jonschlinkert/kind-of/compare/5.0.1...5.0.2 +[5.0.1]: https://github.com/jonschlinkert/kind-of/compare/5.0.0...5.0.1 +[5.0.0]: https://github.com/jonschlinkert/kind-of/compare/4.0.0...5.0.0 +[4.0.0]: https://github.com/jonschlinkert/kind-of/compare/3.2.2...4.0.0 +[3.2.2]: https://github.com/jonschlinkert/kind-of/compare/3.2.1...3.2.2 +[3.2.1]: https://github.com/jonschlinkert/kind-of/compare/3.2.0...3.2.1 +[3.2.0]: https://github.com/jonschlinkert/kind-of/compare/3.1.0...3.2.0 +[3.1.0]: https://github.com/jonschlinkert/kind-of/compare/3.0.4...3.1.0 +[3.0.4]: https://github.com/jonschlinkert/kind-of/compare/3.0.3...3.0.4 +[3.0.3]: https://github.com/jonschlinkert/kind-of/compare/3.0.0...3.0.3 +[3.0.0]: https://github.com/jonschlinkert/kind-of/compare/2.0.1...3.0.0 +[2.0.1]: https://github.com/jonschlinkert/kind-of/compare/2.0.0...2.0.1 +[2.0.0]: https://github.com/jonschlinkert/kind-of/compare/1.1.0...2.0.0 +[1.1.0]: https://github.com/jonschlinkert/kind-of/compare/1.0.0...1.1.0 +[1.0.0]: https://github.com/jonschlinkert/kind-of/compare/0.1.2...1.0.0 +[0.1.2]: https://github.com/jonschlinkert/kind-of/compare/0.1.0...0.1.2 +[0.1.0]: https://github.com/jonschlinkert/kind-of/commit/2fae09b0b19b1aadb558e9be39f0c3ef6034eb87 + +[Unreleased]: https://github.com/jonschlinkert/kind-of/compare/0.1.2...HEAD +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog + diff --git a/node_modules/micromatch/node_modules/kind-of/LICENSE b/node_modules/micromatch/node_modules/kind-of/LICENSE new file mode 100644 index 00000000..3f2eca18 --- /dev/null +++ b/node_modules/micromatch/node_modules/kind-of/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/micromatch/node_modules/kind-of/README.md b/node_modules/micromatch/node_modules/kind-of/README.md new file mode 100644 index 00000000..4b0d4a81 --- /dev/null +++ b/node_modules/micromatch/node_modules/kind-of/README.md @@ -0,0 +1,365 @@ +# kind-of [![NPM version](https://img.shields.io/npm/v/kind-of.svg?style=flat)](https://www.npmjs.com/package/kind-of) [![NPM monthly downloads](https://img.shields.io/npm/dm/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![NPM total downloads](https://img.shields.io/npm/dt/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/kind-of.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/kind-of) + +> Get the native type of a value. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save kind-of +``` + +Install with [bower](https://bower.io/) + +```sh +$ bower install kind-of --save +``` + +## Why use this? + +1. [it's fast](#benchmarks) | [optimizations](#optimizations) +2. [better type checking](#better-type-checking) + +## Usage + +> es5, es6, and browser ready + +```js +var kindOf = require('kind-of'); + +kindOf(undefined); +//=> 'undefined' + +kindOf(null); +//=> 'null' + +kindOf(true); +//=> 'boolean' + +kindOf(false); +//=> 'boolean' + +kindOf(new Buffer('')); +//=> 'buffer' + +kindOf(42); +//=> 'number' + +kindOf('str'); +//=> 'string' + +kindOf(arguments); +//=> 'arguments' + +kindOf({}); +//=> 'object' + +kindOf(Object.create(null)); +//=> 'object' + +kindOf(new Test()); +//=> 'object' + +kindOf(new Date()); +//=> 'date' + +kindOf([1, 2, 3]); +//=> 'array' + +kindOf(/foo/); +//=> 'regexp' + +kindOf(new RegExp('foo')); +//=> 'regexp' + +kindOf(new Error('error')); +//=> 'error' + +kindOf(function () {}); +//=> 'function' + +kindOf(function * () {}); +//=> 'generatorfunction' + +kindOf(Symbol('str')); +//=> 'symbol' + +kindOf(new Map()); +//=> 'map' + +kindOf(new WeakMap()); +//=> 'weakmap' + +kindOf(new Set()); +//=> 'set' + +kindOf(new WeakSet()); +//=> 'weakset' + +kindOf(new Int8Array()); +//=> 'int8array' + +kindOf(new Uint8Array()); +//=> 'uint8array' + +kindOf(new Uint8ClampedArray()); +//=> 'uint8clampedarray' + +kindOf(new Int16Array()); +//=> 'int16array' + +kindOf(new Uint16Array()); +//=> 'uint16array' + +kindOf(new Int32Array()); +//=> 'int32array' + +kindOf(new Uint32Array()); +//=> 'uint32array' + +kindOf(new Float32Array()); +//=> 'float32array' + +kindOf(new Float64Array()); +//=> 'float64array' +``` + +## Benchmarks + +Benchmarked against [typeof](http://github.com/CodingFu/typeof) and [type-of](https://github.com/ForbesLindesay/type-of). 
+ +```bash +# arguments (32 bytes) + kind-of x 17,024,098 ops/sec ±1.90% (86 runs sampled) + lib-type-of x 11,926,235 ops/sec ±1.34% (83 runs sampled) + lib-typeof x 9,245,257 ops/sec ±1.22% (87 runs sampled) + + fastest is kind-of (by 161% avg) + +# array (22 bytes) + kind-of x 17,196,492 ops/sec ±1.07% (88 runs sampled) + lib-type-of x 8,838,283 ops/sec ±1.02% (87 runs sampled) + lib-typeof x 8,677,848 ops/sec ±0.87% (87 runs sampled) + + fastest is kind-of (by 196% avg) + +# boolean (24 bytes) + kind-of x 16,841,600 ops/sec ±1.10% (86 runs sampled) + lib-type-of x 8,096,787 ops/sec ±0.95% (87 runs sampled) + lib-typeof x 8,423,345 ops/sec ±1.15% (86 runs sampled) + + fastest is kind-of (by 204% avg) + +# buffer (38 bytes) + kind-of x 14,848,060 ops/sec ±1.05% (86 runs sampled) + lib-type-of x 3,671,577 ops/sec ±1.49% (87 runs sampled) + lib-typeof x 8,360,236 ops/sec ±1.24% (86 runs sampled) + + fastest is kind-of (by 247% avg) + +# date (30 bytes) + kind-of x 16,067,761 ops/sec ±1.58% (86 runs sampled) + lib-type-of x 8,954,436 ops/sec ±1.40% (87 runs sampled) + lib-typeof x 8,488,307 ops/sec ±1.51% (84 runs sampled) + + fastest is kind-of (by 184% avg) + +# error (36 bytes) + kind-of x 9,634,090 ops/sec ±1.12% (89 runs sampled) + lib-type-of x 7,735,624 ops/sec ±1.32% (86 runs sampled) + lib-typeof x 7,442,160 ops/sec ±1.11% (90 runs sampled) + + fastest is kind-of (by 127% avg) + +# function (34 bytes) + kind-of x 10,031,494 ops/sec ±1.27% (86 runs sampled) + lib-type-of x 9,502,757 ops/sec ±1.17% (89 runs sampled) + lib-typeof x 8,278,985 ops/sec ±1.08% (88 runs sampled) + + fastest is kind-of (by 113% avg) + +# null (24 bytes) + kind-of x 18,159,808 ops/sec ±1.92% (86 runs sampled) + lib-type-of x 12,927,635 ops/sec ±1.01% (88 runs sampled) + lib-typeof x 7,958,234 ops/sec ±1.21% (89 runs sampled) + + fastest is kind-of (by 174% avg) + +# number (22 bytes) + kind-of x 17,846,779 ops/sec ±0.91% (85 runs sampled) + lib-type-of x 3,316,636 ops/sec ±1.19% (86 runs sampled) + lib-typeof x 2,329,477 ops/sec ±2.21% (85 runs sampled) + + fastest is kind-of (by 632% avg) + +# object-plain (47 bytes) + kind-of x 7,085,155 ops/sec ±1.05% (88 runs sampled) + lib-type-of x 8,870,930 ops/sec ±1.06% (83 runs sampled) + lib-typeof x 8,716,024 ops/sec ±1.05% (87 runs sampled) + + fastest is lib-type-of (by 112% avg) + +# regex (25 bytes) + kind-of x 14,196,052 ops/sec ±1.65% (84 runs sampled) + lib-type-of x 9,554,164 ops/sec ±1.25% (88 runs sampled) + lib-typeof x 8,359,691 ops/sec ±1.07% (87 runs sampled) + + fastest is kind-of (by 158% avg) + +# string (33 bytes) + kind-of x 16,131,428 ops/sec ±1.41% (85 runs sampled) + lib-type-of x 7,273,172 ops/sec ±1.05% (87 runs sampled) + lib-typeof x 7,382,635 ops/sec ±1.17% (85 runs sampled) + + fastest is kind-of (by 220% avg) + +# symbol (34 bytes) + kind-of x 17,011,537 ops/sec ±1.24% (86 runs sampled) + lib-type-of x 3,492,454 ops/sec ±1.23% (89 runs sampled) + lib-typeof x 7,471,235 ops/sec ±2.48% (87 runs sampled) + + fastest is kind-of (by 310% avg) + +# template-strings (36 bytes) + kind-of x 15,434,250 ops/sec ±1.46% (83 runs sampled) + lib-type-of x 7,157,907 ops/sec ±0.97% (87 runs sampled) + lib-typeof x 7,517,986 ops/sec ±0.92% (86 runs sampled) + + fastest is kind-of (by 210% avg) + +# undefined (29 bytes) + kind-of x 19,167,115 ops/sec ±1.71% (87 runs sampled) + lib-type-of x 15,477,740 ops/sec ±1.63% (85 runs sampled) + lib-typeof x 19,075,495 ops/sec ±1.17% (83 runs sampled) + + fastest is lib-typeof,kind-of + +``` + +## Optimizations + 
+In 7 out of 8 cases, this library is 2x-10x faster than other top libraries included in the benchmarks. There are a few things that lead to this performance advantage, none of them hard and fast rules, but all of them simple and repeatable in almost any code library: + +1. Optimize around the fastest and most common use cases first. Of course, this will change from project to project, but I took some time to understand how and why `typeof` checks were being used in my own libraries and other libraries I use a lot. +2. Optimize around bottlenecks. In other words, the order in which conditionals are implemented is significant, because each check is only as fast as the failing checks that came before it. Here, the biggest bottleneck by far is checking for plain objects (an object that was created by the `Object` constructor). I opted to make this check happen by process of elimination rather than brute force up front (e.g. by using something like `val.constructor.name`), so that every other type check would not be penalized by it. +3. Don't do unnecessary processing. Why do `.slice(8, -1).toLowerCase();` just to get the word `regexp`? It's much faster to do `if (type === '[object RegExp]') return 'regexp'`. +4. There is no reason to make the code in a microlib as terse as possible just to win points for making it shorter. It's always better to favor performant code over terse code. You will only ever use a single `require()` statement to load the library anyway, regardless of how the code is written. + +## Better type checking + +kind-of seems to be more consistently "correct" than other type checking libs I've looked at. For example, here are some differing results from other popular libs: + +### [typeof](https://github.com/CodingFu/typeof) lib + +Incorrectly identifies instances of custom constructors (pretty common): + +```js +var typeOf = require('typeof'); +function Test() {} +console.log(typeOf(new Test())); +//=> 'test' +``` + +Returns `object` instead of `arguments`: + +```js +function foo() { + console.log(typeOf(arguments)); //=> 'object' +} +foo(); +``` + +### [type-of](https://github.com/ForbesLindesay/type-of) lib + +Incorrectly returns `object` for generator functions, buffers, `Map`, `Set`, `WeakMap` and `WeakSet`: + +```js +var typeOf = require('type-of'); + +function * foo() {} +console.log(typeOf(foo)); +//=> 'object' +console.log(typeOf(new Buffer(''))); +//=> 'object' +console.log(typeOf(new Map())); +//=> 'object' +console.log(typeOf(new Set())); +//=> 'object' +console.log(typeOf(new WeakMap())); +//=> 'object' +console.log(typeOf(new WeakSet())); +//=> 'object' +```
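+ +For contrast, here is a minimal sketch, using only calls already documented in the Usage section above, of what `kind-of` returns for the same kinds of values (the `demo` wrapper exists only to provide a real `arguments` object): + +```js +var kindOf = require('kind-of'); + +function Test() {} + +function demo() { + console.log(kindOf(new Test())); //=> 'object' (not the constructor name) + console.log(kindOf(arguments)); //=> 'arguments' + console.log(kindOf(function * () {})); //=> 'generatorfunction' + console.log(kindOf(new Buffer(''))); //=> 'buffer' + console.log(kindOf(new Map())); //=> 'map' + console.log(kindOf(new Set())); //=> 'set' + console.log(kindOf(new WeakMap())); //=> 'weakmap' + console.log(kindOf(new WeakSet())); //=> 'weakset' +} +demo(); +```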
+ +## About + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +```
+ +### Related projects + +You might also be interested in these projects: + +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [is-number](https://www.npmjs.com/package/is-number): Returns true if the value is a number. comprehensive tests. | [homepage](https://github.com/jonschlinkert/is-number "Returns true if the value is a number. comprehensive tests.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 98 | [jonschlinkert](https://github.com/jonschlinkert) | +| 3 | [aretecode](https://github.com/aretecode) | +| 2 | [miguelmota](https://github.com/miguelmota) | +| 1 | [dtothefp](https://github.com/dtothefp) | +| 1 | [ianstormtaylor](https://github.com/ianstormtaylor) | +| 1 | [ksheedlo](https://github.com/ksheedlo) | +| 1 | [pdehaan](https://github.com/pdehaan) | +| 1 | [laggingreflex](https://github.com/laggingreflex) | +| 1 | [charlike-old](https://github.com/charlike-old) | + +### Author + +**Jon Schlinkert** + +* [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on December 01, 2017._ \ No newline at end of file diff --git a/node_modules/micromatch/node_modules/kind-of/index.js b/node_modules/micromatch/node_modules/kind-of/index.js new file mode 100644 index 00000000..aa2bb394 --- /dev/null +++ b/node_modules/micromatch/node_modules/kind-of/index.js @@ -0,0 +1,129 @@ +var toString = Object.prototype.toString; + +module.exports = function kindOf(val) { + if (val === void 0) return 'undefined'; + if (val === null) return 'null'; + + var type = typeof val; + if (type === 'boolean') return 'boolean'; + if (type === 'string') return 'string'; + if (type === 'number') return 'number'; + if (type === 'symbol') return 'symbol'; + if (type === 'function') { + return isGeneratorFn(val) ? 
'generatorfunction' : 'function'; + } + + if (isArray(val)) return 'array'; + if (isBuffer(val)) return 'buffer'; + if (isArguments(val)) return 'arguments'; + if (isDate(val)) return 'date'; + if (isError(val)) return 'error'; + if (isRegexp(val)) return 'regexp'; + + switch (ctorName(val)) { + case 'Symbol': return 'symbol'; + case 'Promise': return 'promise'; + + // Set, Map, WeakSet, WeakMap + case 'WeakMap': return 'weakmap'; + case 'WeakSet': return 'weakset'; + case 'Map': return 'map'; + case 'Set': return 'set'; + + // 8-bit typed arrays + case 'Int8Array': return 'int8array'; + case 'Uint8Array': return 'uint8array'; + case 'Uint8ClampedArray': return 'uint8clampedarray'; + + // 16-bit typed arrays + case 'Int16Array': return 'int16array'; + case 'Uint16Array': return 'uint16array'; + + // 32-bit typed arrays + case 'Int32Array': return 'int32array'; + case 'Uint32Array': return 'uint32array'; + case 'Float32Array': return 'float32array'; + case 'Float64Array': return 'float64array'; + } + + if (isGeneratorObj(val)) { + return 'generator'; + } + + // Non-plain objects + type = toString.call(val); + switch (type) { + case '[object Object]': return 'object'; + // iterators + case '[object Map Iterator]': return 'mapiterator'; + case '[object Set Iterator]': return 'setiterator'; + case '[object String Iterator]': return 'stringiterator'; + case '[object Array Iterator]': return 'arrayiterator'; + } + + // other + return type.slice(8, -1).toLowerCase().replace(/\s/g, ''); +}; + +function ctorName(val) { + return val.constructor ? val.constructor.name : null; +} + +function isArray(val) { + if (Array.isArray) return Array.isArray(val); + return val instanceof Array; +} + +function isError(val) { + return val instanceof Error || (typeof val.message === 'string' && val.constructor && typeof val.constructor.stackTraceLimit === 'number'); +} + +function isDate(val) { + if (val instanceof Date) return true; + return typeof val.toDateString === 'function' + && typeof val.getDate === 'function' + && typeof val.setDate === 'function'; +} + +function isRegexp(val) { + if (val instanceof RegExp) return true; + return typeof val.flags === 'string' + && typeof val.ignoreCase === 'boolean' + && typeof val.multiline === 'boolean' + && typeof val.global === 'boolean'; +} + +function isGeneratorFn(name, val) { + return ctorName(name) === 'GeneratorFunction'; +} + +function isGeneratorObj(val) { + return typeof val.throw === 'function' + && typeof val.return === 'function' + && typeof val.next === 'function'; +} + +function isArguments(val) { + try { + if (typeof val.length === 'number' && typeof val.callee === 'function') { + return true; + } + } catch (err) { + if (err.message.indexOf('callee') !== -1) { + return true; + } + } + return false; +} + +/** + * If you need to support Safari 5-7 (8-10 yr-old browser), + * take a look at https://github.com/feross/is-buffer + */ + +function isBuffer(val) { + if (val.constructor && typeof val.constructor.isBuffer === 'function') { + return val.constructor.isBuffer(val); + } + return false; +} diff --git a/node_modules/micromatch/node_modules/kind-of/package.json b/node_modules/micromatch/node_modules/kind-of/package.json new file mode 100644 index 00000000..73d70aee --- /dev/null +++ b/node_modules/micromatch/node_modules/kind-of/package.json @@ -0,0 +1,88 @@ +{ + "name": "kind-of", + "description": "Get the native type of a value.", + "version": "6.0.2", + "homepage": "https://github.com/jonschlinkert/kind-of", + "author": "Jon Schlinkert 
(https://github.com/jonschlinkert)", + "contributors": [ + "David Fox-Powell (https://dtothefp.github.io/me)", + "James (https://twitter.com/aretecode)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Ken Sheedlo (kensheedlo.com)", + "laggingreflex (https://github.com/laggingreflex)", + "Miguel Mota (https://miguelmota.com)", + "Peter deHaan (http://about.me/peterdehaan)", + "tunnckoCore (https://i.am.charlike.online)" + ], + "repository": "jonschlinkert/kind-of", + "bugs": { + "url": "https://github.com/jonschlinkert/kind-of/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha", + "prepublish": "browserify -o browser.js -e index.js -s index --bare" + }, + "devDependencies": { + "benchmarked": "^2.0.0", + "browserify": "^14.4.0", + "gulp-format-md": "^1.0.0", + "mocha": "^4.0.1", + "write": "^1.0.3" + }, + "keywords": [ + "arguments", + "array", + "boolean", + "check", + "date", + "function", + "is", + "is-type", + "is-type-of", + "kind", + "kind-of", + "number", + "object", + "of", + "regexp", + "string", + "test", + "type", + "type-of", + "typeof", + "types" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "is-glob", + "is-number", + "is-primitive" + ] + }, + "reflinks": [ + "type-of", + "typeof", + "verb" + ] + } +} diff --git a/node_modules/micromatch/package.json b/node_modules/micromatch/package.json new file mode 100644 index 00000000..44751558 --- /dev/null +++ b/node_modules/micromatch/package.json @@ -0,0 +1,147 @@ +{ + "name": "micromatch", + "description": "Glob matching for javascript/node.js. 
A drop-in replacement and faster alternative to minimatch and multimatch.", + "version": "3.1.10", + "homepage": "https://github.com/micromatch/micromatch", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Amila Welihinda (amilajack.com)", + "Bogdan Chadkin (https://github.com/TrySound)", + "Brian Woodward (https://twitter.com/doowb)", + "Devon Govett (http://badassjs.com)", + "Elan Shanker (https://github.com/es128)", + "Fabrício Matté (https://ultcombo.js.org)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Martin Kolárik (https://kolarik.sk)", + "Olsten Larck (https://i.am.charlike.online)", + "Paul Miller (paulmillr.com)", + "Tom Byrer (https://github.com/tomByrer)", + "Tyler Akins (http://rumkin.com)", + "(https://github.com/DianeLooney)" + ], + "repository": "micromatch/micromatch", + "bugs": { + "url": "https://github.com/micromatch/micromatch/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "lib" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "devDependencies": { + "bash-match": "^1.0.2", + "for-own": "^1.0.0", + "gulp": "^3.9.1", + "gulp-format-md": "^1.0.0", + "gulp-istanbul": "^1.1.3", + "gulp-mocha": "^5.0.0", + "gulp-unused": "^0.2.1", + "is-windows": "^1.0.2", + "minimatch": "^3.0.4", + "minimist": "^1.2.0", + "mocha": "^3.5.3", + "multimatch": "^2.1.0" + }, + "keywords": [ + "bash", + "expand", + "expansion", + "expression", + "file", + "files", + "filter", + "find", + "glob", + "globbing", + "globs", + "globstar", + "match", + "matcher", + "matches", + "matching", + "micromatch", + "minimatch", + "multimatch", + "path", + "pattern", + "patterns", + "regex", + "regexp", + "regular", + "shell", + "wildcard" + ], + "lintDeps": { + "dependencies": { + "options": { + "lock": { + "snapdragon": "^0.8.1" + } + } + }, + "devDependencies": { + "files": { + "options": { + "ignore": [ + "benchmark/**" + ] + } + } + } + }, + "verb": { + "toc": "collapsible", + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "helpers": [ + "./benchmark/helper.js" + ], + "related": { + "list": [ + "braces", + "expand-brackets", + "extglob", + "fill-range", + "nanomatch" + ] + }, + "lint": { + "reflinks": true + }, + "reflinks": [ + "expand-brackets", + "extglob", + "glob-object", + "minimatch", + "multimatch", + "snapdragon" + ] + } +} diff --git a/node_modules/mime-db/HISTORY.md b/node_modules/mime-db/HISTORY.md new file mode 100644 index 00000000..0907d627 --- /dev/null +++ b/node_modules/mime-db/HISTORY.md @@ -0,0 +1,417 @@ +1.40.0 / 2019-04-20 +=================== + + * Add extensions from IANA for `model/*` types + * Add `text/mdx` with extension `.mdx` + +1.39.0 / 2019-04-04 +=================== + + * Add extensions `.siv` and `.sieve` to `application/sieve` + * Add new upstream MIME types + +1.38.0 / 2019-02-04 +=================== + + * Add extension `.nq` to `application/n-quads` + * Add extension `.nt` to `application/n-triples` + * Add new upstream MIME types + * Mark `text/less` as compressible + +1.37.0 / 2018-10-19 +=================== + + * Add extensions to HEIC image 
types + * Add new upstream MIME types + +1.36.0 / 2018-08-20 +=================== + + * Add Apple file extensions from IANA + * Add extensions from IANA for `image/*` types + * Add new upstream MIME types + +1.35.0 / 2018-07-15 +=================== + + * Add extension `.owl` to `application/rdf+xml` + * Add new upstream MIME types + - Removes extension `.woff` from `application/font-woff` + +1.34.0 / 2018-06-03 +=================== + + * Add extension `.csl` to `application/vnd.citationstyles.style+xml` + * Add extension `.es` to `application/ecmascript` + * Add new upstream MIME types + * Add `UTF-8` as default charset for `text/turtle` + * Mark all XML-derived types as compressible + +1.33.0 / 2018-02-15 +=================== + + * Add extensions from IANA for `message/*` types + * Add new upstream MIME types + * Fix some incorrect OOXML types + * Remove `application/font-woff2` + +1.32.0 / 2017-11-29 +=================== + + * Add new upstream MIME types + * Update `text/hjson` to registered `application/hjson` + * Add `text/shex` with extension `.shex` + +1.31.0 / 2017-10-25 +=================== + + * Add `application/raml+yaml` with extension `.raml` + * Add `application/wasm` with extension `.wasm` + * Add new `font` type from IANA + * Add new upstream font extensions + * Add new upstream MIME types + * Add extensions for JPEG-2000 images + +1.30.0 / 2017-08-27 +=================== + + * Add `application/vnd.ms-outlook` + * Add `application/x-arj` + * Add extension `.mjs` to `application/javascript` + * Add glTF types and extensions + * Add new upstream MIME types + * Add `text/x-org` + * Add VirtualBox MIME types + * Fix `source` records for `video/*` types that are IANA + * Update `font/opentype` to registered `font/otf` + +1.29.0 / 2017-07-10 +=================== + + * Add `application/fido.trusted-apps+json` + * Add extension `.wadl` to `application/vnd.sun.wadl+xml` + * Add new upstream MIME types + * Add `UTF-8` as default charset for `text/css` + +1.28.0 / 2017-05-14 +=================== + + * Add new upstream MIME types + * Add extension `.gz` to `application/gzip` + * Update extensions `.md` and `.markdown` to be `text/markdown` + +1.27.0 / 2017-03-16 +=================== + + * Add new upstream MIME types + * Add `image/apng` with extension `.apng` + +1.26.0 / 2017-01-14 +=================== + + * Add new upstream MIME types + * Add extension `.geojson` to `application/geo+json` + +1.25.0 / 2016-11-11 +=================== + + * Add new upstream MIME types + +1.24.0 / 2016-09-18 +=================== + + * Add `audio/mp3` + * Add new upstream MIME types + +1.23.0 / 2016-05-01 +=================== + + * Add new upstream MIME types + * Add extension `.3gpp` to `audio/3gpp` + +1.22.0 / 2016-02-15 +=================== + + * Add `text/slim` + * Add extension `.rng` to `application/xml` + * Add new upstream MIME types + * Fix extension of `application/dash+xml` to be `.mpd` + * Update primary extension to `.m4a` for `audio/mp4` + +1.21.0 / 2016-01-06 +=================== + + * Add Google document types + * Add new upstream MIME types + +1.20.0 / 2015-11-10 +=================== + + * Add `text/x-suse-ymp` + * Add new upstream MIME types + +1.19.0 / 2015-09-17 +=================== + + * Add `application/vnd.apple.pkpass` + * Add new upstream MIME types + +1.18.0 / 2015-09-03 +=================== + + * Add new upstream MIME types + +1.17.0 / 2015-08-13 +=================== + + * Add `application/x-msdos-program` + * Add `audio/g711-0` + * Add `image/vnd.mozilla.apng` + * Add extension 
`.exe` to `application/x-msdos-program` + +1.16.0 / 2015-07-29 +=================== + + * Add `application/vnd.uri-map` + +1.15.0 / 2015-07-13 +=================== + + * Add `application/x-httpd-php` + +1.14.0 / 2015-06-25 +=================== + + * Add `application/scim+json` + * Add `application/vnd.3gpp.ussd+xml` + * Add `application/vnd.biopax.rdf+xml` + * Add `text/x-processing` + +1.13.0 / 2015-06-07 +=================== + + * Add nginx as a source + * Add `application/x-cocoa` + * Add `application/x-java-archive-diff` + * Add `application/x-makeself` + * Add `application/x-perl` + * Add `application/x-pilot` + * Add `application/x-redhat-package-manager` + * Add `application/x-sea` + * Add `audio/x-m4a` + * Add `audio/x-realaudio` + * Add `image/x-jng` + * Add `text/mathml` + +1.12.0 / 2015-06-05 +=================== + + * Add `application/bdoc` + * Add `application/vnd.hyperdrive+json` + * Add `application/x-bdoc` + * Add extension `.rtf` to `text/rtf` + +1.11.0 / 2015-05-31 +=================== + + * Add `audio/wav` + * Add `audio/wave` + * Add extension `.litcoffee` to `text/coffeescript` + * Add extension `.sfd-hdstx` to `application/vnd.hydrostatix.sof-data` + * Add extension `.n-gage` to `application/vnd.nokia.n-gage.symbian.install` + +1.10.0 / 2015-05-19 +=================== + + * Add `application/vnd.balsamiq.bmpr` + * Add `application/vnd.microsoft.portable-executable` + * Add `application/x-ns-proxy-autoconfig` + +1.9.1 / 2015-04-19 +================== + + * Remove `.json` extension from `application/manifest+json` + - This is causing bugs downstream + +1.9.0 / 2015-04-19 +================== + + * Add `application/manifest+json` + * Add `application/vnd.micro+json` + * Add `image/vnd.zbrush.pcx` + * Add `image/x-ms-bmp` + +1.8.0 / 2015-03-13 +================== + + * Add `application/vnd.citationstyles.style+xml` + * Add `application/vnd.fastcopy-disk-image` + * Add `application/vnd.gov.sk.xmldatacontainer+xml` + * Add extension `.jsonld` to `application/ld+json` + +1.7.0 / 2015-02-08 +================== + + * Add `application/vnd.gerber` + * Add `application/vnd.msa-disk-image` + +1.6.1 / 2015-02-05 +================== + + * Community extensions ownership transferred from `node-mime` + +1.6.0 / 2015-01-29 +================== + + * Add `application/jose` + * Add `application/jose+json` + * Add `application/json-seq` + * Add `application/jwk+json` + * Add `application/jwk-set+json` + * Add `application/jwt` + * Add `application/rdap+json` + * Add `application/vnd.gov.sk.e-form+xml` + * Add `application/vnd.ims.imsccv1p3` + +1.5.0 / 2014-12-30 +================== + + * Add `application/vnd.oracle.resource+json` + * Fix various invalid MIME type entries + - `application/mbox+xml` + - `application/oscp-response` + - `application/vwg-multiplexed` + - `audio/g721` + +1.4.0 / 2014-12-21 +================== + + * Add `application/vnd.ims.imsccv1p2` + * Fix various invalid MIME type entries + - `application/vnd-acucobol` + - `application/vnd-curl` + - `application/vnd-dart` + - `application/vnd-dxr` + - `application/vnd-fdf` + - `application/vnd-mif` + - `application/vnd-sema` + - `application/vnd-wap-wmlc` + - `application/vnd.adobe.flash-movie` + - `application/vnd.dece-zip` + - `application/vnd.dvb_service` + - `application/vnd.micrografx-igx` + - `application/vnd.sealed-doc` + - `application/vnd.sealed-eml` + - `application/vnd.sealed-mht` + - `application/vnd.sealed-ppt` + - `application/vnd.sealed-tiff` + - `application/vnd.sealed-xls` + - 
`application/vnd.sealedmedia.softseal-html` + - `application/vnd.sealedmedia.softseal-pdf` + - `application/vnd.wap-slc` + - `application/vnd.wap-wbxml` + - `audio/vnd.sealedmedia.softseal-mpeg` + - `image/vnd-djvu` + - `image/vnd-svf` + - `image/vnd-wap-wbmp` + - `image/vnd.sealed-png` + - `image/vnd.sealedmedia.softseal-gif` + - `image/vnd.sealedmedia.softseal-jpg` + - `model/vnd-dwf` + - `model/vnd.parasolid.transmit-binary` + - `model/vnd.parasolid.transmit-text` + - `text/vnd-a` + - `text/vnd-curl` + - `text/vnd.wap-wml` + * Remove example template MIME types + - `application/example` + - `audio/example` + - `image/example` + - `message/example` + - `model/example` + - `multipart/example` + - `text/example` + - `video/example` + +1.3.1 / 2014-12-16 +================== + + * Fix missing extensions + - `application/json5` + - `text/hjson` + +1.3.0 / 2014-12-07 +================== + + * Add `application/a2l` + * Add `application/aml` + * Add `application/atfx` + * Add `application/atxml` + * Add `application/cdfx+xml` + * Add `application/dii` + * Add `application/json5` + * Add `application/lxf` + * Add `application/mf4` + * Add `application/vnd.apache.thrift.compact` + * Add `application/vnd.apache.thrift.json` + * Add `application/vnd.coffeescript` + * Add `application/vnd.enphase.envoy` + * Add `application/vnd.ims.imsccv1p1` + * Add `text/csv-schema` + * Add `text/hjson` + * Add `text/markdown` + * Add `text/yaml` + +1.2.0 / 2014-11-09 +================== + + * Add `application/cea` + * Add `application/dit` + * Add `application/vnd.gov.sk.e-form+zip` + * Add `application/vnd.tmd.mediaflex.api+xml` + * Type `application/epub+zip` is now IANA-registered + +1.1.2 / 2014-10-23 +================== + + * Rebuild database for `application/x-www-form-urlencoded` change + +1.1.1 / 2014-10-20 +================== + + * Mark `application/x-www-form-urlencoded` as compressible. + +1.1.0 / 2014-09-28 +================== + + * Add `application/font-woff2` + +1.0.3 / 2014-09-25 +================== + + * Fix engine requirement in package + +1.0.2 / 2014-09-25 +================== + + * Add `application/coap-group+json` + * Add `application/dcd` + * Add `application/vnd.apache.thrift.binary` + * Add `image/vnd.tencent.tap` + * Mark all JSON-derived types as compressible + * Update `text/vtt` data + +1.0.1 / 2014-08-30 +================== + + * Fix extension ordering + +1.0.0 / 2014-08-30 +================== + + * Add `application/atf` + * Add `application/merge-patch+json` + * Add `multipart/x-mixed-replace` + * Add `source: 'apache'` metadata + * Add `source: 'iana'` metadata + * Remove badly-assumed charset data diff --git a/node_modules/mime-db/LICENSE b/node_modules/mime-db/LICENSE new file mode 100644 index 00000000..a7ae8ee9 --- /dev/null +++ b/node_modules/mime-db/LICENSE @@ -0,0 +1,22 @@ + +The MIT License (MIT) + +Copyright (c) 2014 Jonathan Ong me@jongleberry.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/mime-db/README.md b/node_modules/mime-db/README.md new file mode 100644 index 00000000..dcc9d093 --- /dev/null +++ b/node_modules/mime-db/README.md @@ -0,0 +1,94 @@ +# mime-db + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][travis-image]][travis-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +This is a database of all mime types. +It consists of a single, public JSON file and does not include any logic, +allowing it to remain as un-opinionated as possible with an API. +It aggregates data from the following sources: + +- http://www.iana.org/assignments/media-types/media-types.xhtml +- http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types +- http://hg.nginx.org/nginx/raw-file/default/conf/mime.types + +## Installation + +```bash +npm install mime-db +``` + +### Database Download + +If you're crazy enough to use this in the browser, you can just grab the +JSON file using [jsDelivr](https://www.jsdelivr.com/). It is recommended to +replace `master` with [a release tag](https://github.com/jshttp/mime-db/tags) +as the JSON format may change in the future. + +``` +https://cdn.jsdelivr.net/gh/jshttp/mime-db@master/db.json +``` + +## Usage + +```js +var db = require('mime-db'); + +// grab data on .js files +var data = db['application/javascript']; +``` + +## Data Structure + +The JSON file is a map lookup for lowercased mime types. +Each mime type has the following properties: + +- `.source` - where the mime type is defined. + If not set, it's probably a custom media type. + - `apache` - [Apache common media types](http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types) + - `iana` - [IANA-defined media types](http://www.iana.org/assignments/media-types/media-types.xhtml) + - `nginx` - [nginx media types](http://hg.nginx.org/nginx/raw-file/default/conf/mime.types) +- `.extensions[]` - known extensions associated with this mime type. +- `.compressible` - whether a file of this type can be gzipped. +- `.charset` - the default charset associated with this type, if any. + +If unknown, every property could be `undefined`. + +## Contributing + +To edit the database, only make PRs against `src/custom.json` or +`src/custom-suffix.json`. + +The `src/custom.json` file is a JSON object with the MIME type as the keys +and the values being an object with the following keys: + +- `compressible` - leave out if you don't know, otherwise `true`/`false` to + indicate whether the data represented by the type is typically compressible. +- `extensions` - include an array of file extensions that are associated with + the type. +- `notes` - human-readable notes about the type, typically what the type is. +- `sources` - include an array of URLs of where the MIME type and the associated + extensions are sourced from. This needs to be a [primary source](https://en.wikipedia.org/wiki/Primary_source); + links to type aggregating sites and Wikipedia are _not acceptable_. 
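+ +For illustration only, a hypothetical `src/custom.json` entry using these keys might look like the sketch below (the media type, extension, and URL are invented, not real registrations): + +```json +{ + "application/x-example-notes+json": { + "compressible": true, + "extensions": ["exnotes"], + "notes": "Hypothetical type, shown only to illustrate the entry shape.", + "sources": ["https://example.com/spec/x-example-notes"] + } +} +```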
+ +To update the build, run `npm run build`. + +## Adding Custom Media Types + +The best way to get new media types included in this library is to register +them with the IANA. The community registration procedure is outlined in +[RFC 6838 section 5](http://tools.ietf.org/html/rfc6838#section-5). Types +registered with the IANA are automatically pulled into this library. + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-db/master +[coveralls-url]: https://coveralls.io/r/jshttp/mime-db?branch=master +[node-image]: https://badgen.net/npm/node/mime-db +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/mime-db +[npm-url]: https://npmjs.org/package/mime-db +[npm-version-image]: https://badgen.net/npm/v/mime-db +[travis-image]: https://badgen.net/travis/jshttp/mime-db/master +[travis-url]: https://travis-ci.org/jshttp/mime-db diff --git a/node_modules/mime-db/db.json b/node_modules/mime-db/db.json new file mode 100644 index 00000000..a5fc9870 --- /dev/null +++ b/node_modules/mime-db/db.json @@ -0,0 +1,7834 @@ +{ + "application/1d-interleaved-parityfec": { + "source": "iana" + }, + "application/3gpdash-qoe-report+xml": { + "source": "iana", + "compressible": true + }, + "application/3gpp-ims+xml": { + "source": "iana", + "compressible": true + }, + "application/a2l": { + "source": "iana" + }, + "application/activemessage": { + "source": "iana" + }, + "application/activity+json": { + "source": "iana", + "compressible": true + }, + "application/alto-costmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-costmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/alto-directory+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcost+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcostparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointprop+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointpropparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-error+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/aml": { + "source": "iana" + }, + "application/andrew-inset": { + "source": "iana", + "extensions": ["ez"] + }, + "application/applefile": { + "source": "iana" + }, + "application/applixware": { + "source": "apache", + "extensions": ["aw"] + }, + "application/atf": { + "source": "iana" + }, + "application/atfx": { + "source": "iana" + }, + "application/atom+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atom"] + }, + "application/atomcat+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomcat"] + }, + "application/atomdeleted+xml": { + "source": "iana", + "compressible": true + }, + "application/atomicmail": { + "source": "iana" + }, + "application/atomsvc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomsvc"] + }, + "application/atsc-dwd+xml": { + "source": "iana", + "compressible": true + }, + "application/atsc-held+xml": { + "source": "iana", + "compressible": true + }, + "application/atsc-rsat+xml": { + "source": "iana", + "compressible": true + }, + "application/atxml": { + "source": "iana" + }, + "application/auth-policy+xml": { + 
"source": "iana", + "compressible": true + }, + "application/bacnet-xdd+zip": { + "source": "iana", + "compressible": false + }, + "application/batch-smtp": { + "source": "iana" + }, + "application/bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/beep+xml": { + "source": "iana", + "compressible": true + }, + "application/calendar+json": { + "source": "iana", + "compressible": true + }, + "application/calendar+xml": { + "source": "iana", + "compressible": true + }, + "application/call-completion": { + "source": "iana" + }, + "application/cals-1840": { + "source": "iana" + }, + "application/cbor": { + "source": "iana" + }, + "application/cccex": { + "source": "iana" + }, + "application/ccmp+xml": { + "source": "iana", + "compressible": true + }, + "application/ccxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ccxml"] + }, + "application/cdfx+xml": { + "source": "iana", + "compressible": true + }, + "application/cdmi-capability": { + "source": "iana", + "extensions": ["cdmia"] + }, + "application/cdmi-container": { + "source": "iana", + "extensions": ["cdmic"] + }, + "application/cdmi-domain": { + "source": "iana", + "extensions": ["cdmid"] + }, + "application/cdmi-object": { + "source": "iana", + "extensions": ["cdmio"] + }, + "application/cdmi-queue": { + "source": "iana", + "extensions": ["cdmiq"] + }, + "application/cdni": { + "source": "iana" + }, + "application/cea": { + "source": "iana" + }, + "application/cea-2018+xml": { + "source": "iana", + "compressible": true + }, + "application/cellml+xml": { + "source": "iana", + "compressible": true + }, + "application/cfw": { + "source": "iana" + }, + "application/clue_info+xml": { + "source": "iana", + "compressible": true + }, + "application/cms": { + "source": "iana" + }, + "application/cnrp+xml": { + "source": "iana", + "compressible": true + }, + "application/coap-group+json": { + "source": "iana", + "compressible": true + }, + "application/coap-payload": { + "source": "iana" + }, + "application/commonground": { + "source": "iana" + }, + "application/conference-info+xml": { + "source": "iana", + "compressible": true + }, + "application/cose": { + "source": "iana" + }, + "application/cose-key": { + "source": "iana" + }, + "application/cose-key-set": { + "source": "iana" + }, + "application/cpl+xml": { + "source": "iana", + "compressible": true + }, + "application/csrattrs": { + "source": "iana" + }, + "application/csta+xml": { + "source": "iana", + "compressible": true + }, + "application/cstadata+xml": { + "source": "iana", + "compressible": true + }, + "application/csvm+json": { + "source": "iana", + "compressible": true + }, + "application/cu-seeme": { + "source": "apache", + "extensions": ["cu"] + }, + "application/cwt": { + "source": "iana" + }, + "application/cybercash": { + "source": "iana" + }, + "application/dart": { + "compressible": true + }, + "application/dash+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpd"] + }, + "application/dashdelta": { + "source": "iana" + }, + "application/davmount+xml": { + "source": "iana", + "compressible": true, + "extensions": ["davmount"] + }, + "application/dca-rft": { + "source": "iana" + }, + "application/dcd": { + "source": "iana" + }, + "application/dec-dx": { + "source": "iana" + }, + "application/dialog-info+xml": { + "source": "iana", + "compressible": true + }, + "application/dicom": { + "source": "iana" + }, + "application/dicom+json": { + "source": "iana", + "compressible": true + }, + 
"application/dicom+xml": { + "source": "iana", + "compressible": true + }, + "application/dii": { + "source": "iana" + }, + "application/dit": { + "source": "iana" + }, + "application/dns": { + "source": "iana" + }, + "application/dns+json": { + "source": "iana", + "compressible": true + }, + "application/dns-message": { + "source": "iana" + }, + "application/docbook+xml": { + "source": "apache", + "compressible": true, + "extensions": ["dbk"] + }, + "application/dskpp+xml": { + "source": "iana", + "compressible": true + }, + "application/dssc+der": { + "source": "iana", + "extensions": ["dssc"] + }, + "application/dssc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdssc"] + }, + "application/dvcs": { + "source": "iana" + }, + "application/ecmascript": { + "source": "iana", + "compressible": true, + "extensions": ["ecma","es"] + }, + "application/edi-consent": { + "source": "iana" + }, + "application/edi-x12": { + "source": "iana", + "compressible": false + }, + "application/edifact": { + "source": "iana", + "compressible": false + }, + "application/efi": { + "source": "iana" + }, + "application/emergencycalldata.comment+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.control+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.deviceinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.ecall.msd": { + "source": "iana" + }, + "application/emergencycalldata.providerinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.serviceinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.subscriberinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.veds+xml": { + "source": "iana", + "compressible": true + }, + "application/emma+xml": { + "source": "iana", + "compressible": true, + "extensions": ["emma"] + }, + "application/emotionml+xml": { + "source": "iana", + "compressible": true + }, + "application/encaprtp": { + "source": "iana" + }, + "application/epp+xml": { + "source": "iana", + "compressible": true + }, + "application/epub+zip": { + "source": "iana", + "compressible": false, + "extensions": ["epub"] + }, + "application/eshop": { + "source": "iana" + }, + "application/exi": { + "source": "iana", + "extensions": ["exi"] + }, + "application/expect-ct-report+json": { + "source": "iana", + "compressible": true + }, + "application/fastinfoset": { + "source": "iana" + }, + "application/fastsoap": { + "source": "iana" + }, + "application/fdt+xml": { + "source": "iana", + "compressible": true + }, + "application/fhir+json": { + "source": "iana", + "compressible": true + }, + "application/fhir+xml": { + "source": "iana", + "compressible": true + }, + "application/fido.trusted-apps+json": { + "compressible": true + }, + "application/fits": { + "source": "iana" + }, + "application/font-sfnt": { + "source": "iana" + }, + "application/font-tdpfr": { + "source": "iana", + "extensions": ["pfr"] + }, + "application/font-woff": { + "source": "iana", + "compressible": false + }, + "application/framework-attributes+xml": { + "source": "iana", + "compressible": true + }, + "application/geo+json": { + "source": "iana", + "compressible": true, + "extensions": ["geojson"] + }, + "application/geo+json-seq": { + "source": "iana" + }, + "application/geopackage+sqlite3": { + "source": "iana" + }, + "application/geoxacml+xml": { + "source": "iana", + 
"compressible": true + }, + "application/gltf-buffer": { + "source": "iana" + }, + "application/gml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["gml"] + }, + "application/gpx+xml": { + "source": "apache", + "compressible": true, + "extensions": ["gpx"] + }, + "application/gxf": { + "source": "apache", + "extensions": ["gxf"] + }, + "application/gzip": { + "source": "iana", + "compressible": false, + "extensions": ["gz"] + }, + "application/h224": { + "source": "iana" + }, + "application/held+xml": { + "source": "iana", + "compressible": true + }, + "application/hjson": { + "extensions": ["hjson"] + }, + "application/http": { + "source": "iana" + }, + "application/hyperstudio": { + "source": "iana", + "extensions": ["stk"] + }, + "application/ibe-key-request+xml": { + "source": "iana", + "compressible": true + }, + "application/ibe-pkg-reply+xml": { + "source": "iana", + "compressible": true + }, + "application/ibe-pp-data": { + "source": "iana" + }, + "application/iges": { + "source": "iana" + }, + "application/im-iscomposing+xml": { + "source": "iana", + "compressible": true + }, + "application/index": { + "source": "iana" + }, + "application/index.cmd": { + "source": "iana" + }, + "application/index.obj": { + "source": "iana" + }, + "application/index.response": { + "source": "iana" + }, + "application/index.vnd": { + "source": "iana" + }, + "application/inkml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ink","inkml"] + }, + "application/iotp": { + "source": "iana" + }, + "application/ipfix": { + "source": "iana", + "extensions": ["ipfix"] + }, + "application/ipp": { + "source": "iana" + }, + "application/isup": { + "source": "iana" + }, + "application/its+xml": { + "source": "iana", + "compressible": true + }, + "application/java-archive": { + "source": "apache", + "compressible": false, + "extensions": ["jar","war","ear"] + }, + "application/java-serialized-object": { + "source": "apache", + "compressible": false, + "extensions": ["ser"] + }, + "application/java-vm": { + "source": "apache", + "compressible": false, + "extensions": ["class"] + }, + "application/javascript": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["js","mjs"] + }, + "application/jf2feed+json": { + "source": "iana", + "compressible": true + }, + "application/jose": { + "source": "iana" + }, + "application/jose+json": { + "source": "iana", + "compressible": true + }, + "application/jrd+json": { + "source": "iana", + "compressible": true + }, + "application/json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["json","map"] + }, + "application/json-patch+json": { + "source": "iana", + "compressible": true + }, + "application/json-seq": { + "source": "iana" + }, + "application/json5": { + "extensions": ["json5"] + }, + "application/jsonml+json": { + "source": "apache", + "compressible": true, + "extensions": ["jsonml"] + }, + "application/jwk+json": { + "source": "iana", + "compressible": true + }, + "application/jwk-set+json": { + "source": "iana", + "compressible": true + }, + "application/jwt": { + "source": "iana" + }, + "application/kpml-request+xml": { + "source": "iana", + "compressible": true + }, + "application/kpml-response+xml": { + "source": "iana", + "compressible": true + }, + "application/ld+json": { + "source": "iana", + "compressible": true, + "extensions": ["jsonld"] + }, + "application/lgr+xml": { + "source": "iana", + "compressible": true + }, + "application/link-format": { 
+ "source": "iana" + }, + "application/load-control+xml": { + "source": "iana", + "compressible": true + }, + "application/lost+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lostxml"] + }, + "application/lostsync+xml": { + "source": "iana", + "compressible": true + }, + "application/lxf": { + "source": "iana" + }, + "application/mac-binhex40": { + "source": "iana", + "extensions": ["hqx"] + }, + "application/mac-compactpro": { + "source": "apache", + "extensions": ["cpt"] + }, + "application/macwriteii": { + "source": "iana" + }, + "application/mads+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mads"] + }, + "application/manifest+json": { + "charset": "UTF-8", + "compressible": true, + "extensions": ["webmanifest"] + }, + "application/marc": { + "source": "iana", + "extensions": ["mrc"] + }, + "application/marcxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mrcx"] + }, + "application/mathematica": { + "source": "iana", + "extensions": ["ma","nb","mb"] + }, + "application/mathml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mathml"] + }, + "application/mathml-content+xml": { + "source": "iana", + "compressible": true + }, + "application/mathml-presentation+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-associated-procedure-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-deregister+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-envelope+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-msk+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-msk-response+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-protection-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-reception-report+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-register+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-register-response+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-schedule+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-user-service-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbox": { + "source": "iana", + "extensions": ["mbox"] + }, + "application/media-policy-dataset+xml": { + "source": "iana", + "compressible": true + }, + "application/media_control+xml": { + "source": "iana", + "compressible": true + }, + "application/mediaservercontrol+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mscml"] + }, + "application/merge-patch+json": { + "source": "iana", + "compressible": true + }, + "application/metalink+xml": { + "source": "apache", + "compressible": true, + "extensions": ["metalink"] + }, + "application/metalink4+xml": { + "source": "iana", + "compressible": true, + "extensions": ["meta4"] + }, + "application/mets+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mets"] + }, + "application/mf4": { + "source": "iana" + }, + "application/mikey": { + "source": "iana" + }, + "application/mmt-aei+xml": { + "source": "iana", + "compressible": true + }, + "application/mmt-usd+xml": { + "source": "iana", + "compressible": true + }, + "application/mods+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mods"] + }, + "application/moss-keys": { + "source": "iana" + }, + "application/moss-signature": { + 
"source": "iana" + }, + "application/mosskey-data": { + "source": "iana" + }, + "application/mosskey-request": { + "source": "iana" + }, + "application/mp21": { + "source": "iana", + "extensions": ["m21","mp21"] + }, + "application/mp4": { + "source": "iana", + "extensions": ["mp4s","m4p"] + }, + "application/mpeg4-generic": { + "source": "iana" + }, + "application/mpeg4-iod": { + "source": "iana" + }, + "application/mpeg4-iod-xmt": { + "source": "iana" + }, + "application/mrb-consumer+xml": { + "source": "iana", + "compressible": true + }, + "application/mrb-publish+xml": { + "source": "iana", + "compressible": true + }, + "application/msc-ivr+xml": { + "source": "iana", + "compressible": true + }, + "application/msc-mixer+xml": { + "source": "iana", + "compressible": true + }, + "application/msword": { + "source": "iana", + "compressible": false, + "extensions": ["doc","dot"] + }, + "application/mud+json": { + "source": "iana", + "compressible": true + }, + "application/mxf": { + "source": "iana", + "extensions": ["mxf"] + }, + "application/n-quads": { + "source": "iana", + "extensions": ["nq"] + }, + "application/n-triples": { + "source": "iana", + "extensions": ["nt"] + }, + "application/nasdata": { + "source": "iana" + }, + "application/news-checkgroups": { + "source": "iana" + }, + "application/news-groupinfo": { + "source": "iana" + }, + "application/news-transmission": { + "source": "iana" + }, + "application/nlsml+xml": { + "source": "iana", + "compressible": true + }, + "application/node": { + "source": "iana" + }, + "application/nss": { + "source": "iana" + }, + "application/ocsp-request": { + "source": "iana" + }, + "application/ocsp-response": { + "source": "iana" + }, + "application/octet-stream": { + "source": "iana", + "compressible": false, + "extensions": ["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"] + }, + "application/oda": { + "source": "iana", + "extensions": ["oda"] + }, + "application/odm+xml": { + "source": "iana", + "compressible": true + }, + "application/odx": { + "source": "iana" + }, + "application/oebps-package+xml": { + "source": "iana", + "compressible": true, + "extensions": ["opf"] + }, + "application/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["ogx"] + }, + "application/omdoc+xml": { + "source": "apache", + "compressible": true, + "extensions": ["omdoc"] + }, + "application/onenote": { + "source": "apache", + "extensions": ["onetoc","onetoc2","onetmp","onepkg"] + }, + "application/oscore": { + "source": "iana" + }, + "application/oxps": { + "source": "iana", + "extensions": ["oxps"] + }, + "application/p2p-overlay+xml": { + "source": "iana", + "compressible": true + }, + "application/parityfec": { + "source": "iana" + }, + "application/passport": { + "source": "iana" + }, + "application/patch-ops-error+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xer"] + }, + "application/pdf": { + "source": "iana", + "compressible": false, + "extensions": ["pdf"] + }, + "application/pdx": { + "source": "iana" + }, + "application/pem-certificate-chain": { + "source": "iana" + }, + "application/pgp-encrypted": { + "source": "iana", + "compressible": false, + "extensions": ["pgp"] + }, + "application/pgp-keys": { + "source": "iana" + }, + "application/pgp-signature": { + "source": "iana", + "extensions": ["asc","sig"] + }, + "application/pics-rules": { + "source": "apache", + "extensions": ["prf"] + }, + "application/pidf+xml": 
{ + "source": "iana", + "compressible": true + }, + "application/pidf-diff+xml": { + "source": "iana", + "compressible": true + }, + "application/pkcs10": { + "source": "iana", + "extensions": ["p10"] + }, + "application/pkcs12": { + "source": "iana" + }, + "application/pkcs7-mime": { + "source": "iana", + "extensions": ["p7m","p7c"] + }, + "application/pkcs7-signature": { + "source": "iana", + "extensions": ["p7s"] + }, + "application/pkcs8": { + "source": "iana", + "extensions": ["p8"] + }, + "application/pkcs8-encrypted": { + "source": "iana" + }, + "application/pkix-attr-cert": { + "source": "iana", + "extensions": ["ac"] + }, + "application/pkix-cert": { + "source": "iana", + "extensions": ["cer"] + }, + "application/pkix-crl": { + "source": "iana", + "extensions": ["crl"] + }, + "application/pkix-pkipath": { + "source": "iana", + "extensions": ["pkipath"] + }, + "application/pkixcmp": { + "source": "iana", + "extensions": ["pki"] + }, + "application/pls+xml": { + "source": "iana", + "compressible": true, + "extensions": ["pls"] + }, + "application/poc-settings+xml": { + "source": "iana", + "compressible": true + }, + "application/postscript": { + "source": "iana", + "compressible": true, + "extensions": ["ai","eps","ps"] + }, + "application/ppsp-tracker+json": { + "source": "iana", + "compressible": true + }, + "application/problem+json": { + "source": "iana", + "compressible": true + }, + "application/problem+xml": { + "source": "iana", + "compressible": true + }, + "application/provenance+xml": { + "source": "iana", + "compressible": true + }, + "application/prs.alvestrand.titrax-sheet": { + "source": "iana" + }, + "application/prs.cww": { + "source": "iana", + "extensions": ["cww"] + }, + "application/prs.hpub+zip": { + "source": "iana", + "compressible": false + }, + "application/prs.nprend": { + "source": "iana" + }, + "application/prs.plucker": { + "source": "iana" + }, + "application/prs.rdf-xml-crypt": { + "source": "iana" + }, + "application/prs.xsf+xml": { + "source": "iana", + "compressible": true + }, + "application/pskc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["pskcxml"] + }, + "application/qsig": { + "source": "iana" + }, + "application/raml+yaml": { + "compressible": true, + "extensions": ["raml"] + }, + "application/raptorfec": { + "source": "iana" + }, + "application/rdap+json": { + "source": "iana", + "compressible": true + }, + "application/rdf+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rdf","owl"] + }, + "application/reginfo+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rif"] + }, + "application/relax-ng-compact-syntax": { + "source": "iana", + "extensions": ["rnc"] + }, + "application/remote-printing": { + "source": "iana" + }, + "application/reputon+json": { + "source": "iana", + "compressible": true + }, + "application/resource-lists+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rl"] + }, + "application/resource-lists-diff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rld"] + }, + "application/rfc+xml": { + "source": "iana", + "compressible": true + }, + "application/riscos": { + "source": "iana" + }, + "application/rlmi+xml": { + "source": "iana", + "compressible": true + }, + "application/rls-services+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rs"] + }, + "application/route-apd+xml": { + "source": "iana", + "compressible": true + }, + "application/route-s-tsid+xml": { + "source": "iana", + "compressible": 
true + }, + "application/route-usd+xml": { + "source": "iana", + "compressible": true + }, + "application/rpki-ghostbusters": { + "source": "iana", + "extensions": ["gbr"] + }, + "application/rpki-manifest": { + "source": "iana", + "extensions": ["mft"] + }, + "application/rpki-publication": { + "source": "iana" + }, + "application/rpki-roa": { + "source": "iana", + "extensions": ["roa"] + }, + "application/rpki-updown": { + "source": "iana" + }, + "application/rsd+xml": { + "source": "apache", + "compressible": true, + "extensions": ["rsd"] + }, + "application/rss+xml": { + "source": "apache", + "compressible": true, + "extensions": ["rss"] + }, + "application/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + }, + "application/rtploopback": { + "source": "iana" + }, + "application/rtx": { + "source": "iana" + }, + "application/samlassertion+xml": { + "source": "iana", + "compressible": true + }, + "application/samlmetadata+xml": { + "source": "iana", + "compressible": true + }, + "application/sbml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sbml"] + }, + "application/scaip+xml": { + "source": "iana", + "compressible": true + }, + "application/scim+json": { + "source": "iana", + "compressible": true + }, + "application/scvp-cv-request": { + "source": "iana", + "extensions": ["scq"] + }, + "application/scvp-cv-response": { + "source": "iana", + "extensions": ["scs"] + }, + "application/scvp-vp-request": { + "source": "iana", + "extensions": ["spq"] + }, + "application/scvp-vp-response": { + "source": "iana", + "extensions": ["spp"] + }, + "application/sdp": { + "source": "iana", + "extensions": ["sdp"] + }, + "application/secevent+jwt": { + "source": "iana" + }, + "application/senml+cbor": { + "source": "iana" + }, + "application/senml+json": { + "source": "iana", + "compressible": true + }, + "application/senml+xml": { + "source": "iana", + "compressible": true + }, + "application/senml-exi": { + "source": "iana" + }, + "application/sensml+cbor": { + "source": "iana" + }, + "application/sensml+json": { + "source": "iana", + "compressible": true + }, + "application/sensml+xml": { + "source": "iana", + "compressible": true + }, + "application/sensml-exi": { + "source": "iana" + }, + "application/sep+xml": { + "source": "iana", + "compressible": true + }, + "application/sep-exi": { + "source": "iana" + }, + "application/session-info": { + "source": "iana" + }, + "application/set-payment": { + "source": "iana" + }, + "application/set-payment-initiation": { + "source": "iana", + "extensions": ["setpay"] + }, + "application/set-registration": { + "source": "iana" + }, + "application/set-registration-initiation": { + "source": "iana", + "extensions": ["setreg"] + }, + "application/sgml": { + "source": "iana" + }, + "application/sgml-open-catalog": { + "source": "iana" + }, + "application/shf+xml": { + "source": "iana", + "compressible": true, + "extensions": ["shf"] + }, + "application/sieve": { + "source": "iana", + "extensions": ["siv","sieve"] + }, + "application/simple-filter+xml": { + "source": "iana", + "compressible": true + }, + "application/simple-message-summary": { + "source": "iana" + }, + "application/simplesymbolcontainer": { + "source": "iana" + }, + "application/slate": { + "source": "iana" + }, + "application/smil": { + "source": "iana" + }, + "application/smil+xml": { + "source": "iana", + "compressible": true, + "extensions": ["smi","smil"] + }, + "application/smpte336m": { + "source": "iana" + }, + 
"application/soap+fastinfoset": { + "source": "iana" + }, + "application/soap+xml": { + "source": "iana", + "compressible": true + }, + "application/sparql-query": { + "source": "iana", + "extensions": ["rq"] + }, + "application/sparql-results+xml": { + "source": "iana", + "compressible": true, + "extensions": ["srx"] + }, + "application/spirits-event+xml": { + "source": "iana", + "compressible": true + }, + "application/sql": { + "source": "iana" + }, + "application/srgs": { + "source": "iana", + "extensions": ["gram"] + }, + "application/srgs+xml": { + "source": "iana", + "compressible": true, + "extensions": ["grxml"] + }, + "application/sru+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sru"] + }, + "application/ssdl+xml": { + "source": "apache", + "compressible": true, + "extensions": ["ssdl"] + }, + "application/ssml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ssml"] + }, + "application/stix+json": { + "source": "iana", + "compressible": true + }, + "application/tamp-apex-update": { + "source": "iana" + }, + "application/tamp-apex-update-confirm": { + "source": "iana" + }, + "application/tamp-community-update": { + "source": "iana" + }, + "application/tamp-community-update-confirm": { + "source": "iana" + }, + "application/tamp-error": { + "source": "iana" + }, + "application/tamp-sequence-adjust": { + "source": "iana" + }, + "application/tamp-sequence-adjust-confirm": { + "source": "iana" + }, + "application/tamp-status-query": { + "source": "iana" + }, + "application/tamp-status-response": { + "source": "iana" + }, + "application/tamp-update": { + "source": "iana" + }, + "application/tamp-update-confirm": { + "source": "iana" + }, + "application/tar": { + "compressible": true + }, + "application/taxii+json": { + "source": "iana", + "compressible": true + }, + "application/tei+xml": { + "source": "iana", + "compressible": true, + "extensions": ["tei","teicorpus"] + }, + "application/tetra_isi": { + "source": "iana" + }, + "application/thraud+xml": { + "source": "iana", + "compressible": true, + "extensions": ["tfi"] + }, + "application/timestamp-query": { + "source": "iana" + }, + "application/timestamp-reply": { + "source": "iana" + }, + "application/timestamped-data": { + "source": "iana", + "extensions": ["tsd"] + }, + "application/tlsrpt+gzip": { + "source": "iana" + }, + "application/tlsrpt+json": { + "source": "iana", + "compressible": true + }, + "application/tnauthlist": { + "source": "iana" + }, + "application/trickle-ice-sdpfrag": { + "source": "iana" + }, + "application/trig": { + "source": "iana" + }, + "application/ttml+xml": { + "source": "iana", + "compressible": true + }, + "application/tve-trigger": { + "source": "iana" + }, + "application/tzif": { + "source": "iana" + }, + "application/tzif-leap": { + "source": "iana" + }, + "application/ulpfec": { + "source": "iana" + }, + "application/urc-grpsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/urc-ressheet+xml": { + "source": "iana", + "compressible": true + }, + "application/urc-targetdesc+xml": { + "source": "iana", + "compressible": true + }, + "application/urc-uisocketdesc+xml": { + "source": "iana", + "compressible": true + }, + "application/vcard+json": { + "source": "iana", + "compressible": true + }, + "application/vcard+xml": { + "source": "iana", + "compressible": true + }, + "application/vemmi": { + "source": "iana" + }, + "application/vividence.scriptfile": { + "source": "apache" + }, + 
"application/vnd.1000minds.decision-model+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp-prose+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp-prose-pc3ch+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp-v2x-local-service-information": { + "source": "iana" + }, + "application/vnd.3gpp.access-transfer-events+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.bsf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.gmop+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mc-signalling-ear": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-payload": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-signalling": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-floor-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-location-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-mbms-usage-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-signed+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-ue-init-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-affiliation-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-location-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-mbms-usage-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-transmission-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mid-call+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.pic-bw-large": { + "source": "iana", + "extensions": ["plb"] + }, + "application/vnd.3gpp.pic-bw-small": { + "source": "iana", + "extensions": ["psb"] + }, + "application/vnd.3gpp.pic-bw-var": { + "source": "iana", + "extensions": ["pvb"] + }, + "application/vnd.3gpp.sms": { + "source": 
"iana" + }, + "application/vnd.3gpp.sms+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.srvcc-ext+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.srvcc-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.state-and-event-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.ussd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp2.bcmcsinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp2.sms": { + "source": "iana" + }, + "application/vnd.3gpp2.tcap": { + "source": "iana", + "extensions": ["tcap"] + }, + "application/vnd.3lightssoftware.imagescal": { + "source": "iana" + }, + "application/vnd.3m.post-it-notes": { + "source": "iana", + "extensions": ["pwn"] + }, + "application/vnd.accpac.simply.aso": { + "source": "iana", + "extensions": ["aso"] + }, + "application/vnd.accpac.simply.imp": { + "source": "iana", + "extensions": ["imp"] + }, + "application/vnd.acucobol": { + "source": "iana", + "extensions": ["acu"] + }, + "application/vnd.acucorp": { + "source": "iana", + "extensions": ["atc","acutc"] + }, + "application/vnd.adobe.air-application-installer-package+zip": { + "source": "apache", + "compressible": false, + "extensions": ["air"] + }, + "application/vnd.adobe.flash.movie": { + "source": "iana" + }, + "application/vnd.adobe.formscentral.fcdt": { + "source": "iana", + "extensions": ["fcdt"] + }, + "application/vnd.adobe.fxp": { + "source": "iana", + "extensions": ["fxp","fxpl"] + }, + "application/vnd.adobe.partial-upload": { + "source": "iana" + }, + "application/vnd.adobe.xdp+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdp"] + }, + "application/vnd.adobe.xfdf": { + "source": "iana", + "extensions": ["xfdf"] + }, + "application/vnd.aether.imp": { + "source": "iana" + }, + "application/vnd.afpc.afplinedata": { + "source": "iana" + }, + "application/vnd.afpc.modca": { + "source": "iana" + }, + "application/vnd.ah-barcode": { + "source": "iana" + }, + "application/vnd.ahead.space": { + "source": "iana", + "extensions": ["ahead"] + }, + "application/vnd.airzip.filesecure.azf": { + "source": "iana", + "extensions": ["azf"] + }, + "application/vnd.airzip.filesecure.azs": { + "source": "iana", + "extensions": ["azs"] + }, + "application/vnd.amadeus+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.amazon.ebook": { + "source": "apache", + "extensions": ["azw"] + }, + "application/vnd.amazon.mobi8-ebook": { + "source": "iana" + }, + "application/vnd.americandynamics.acc": { + "source": "iana", + "extensions": ["acc"] + }, + "application/vnd.amiga.ami": { + "source": "iana", + "extensions": ["ami"] + }, + "application/vnd.amundsen.maze+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.android.package-archive": { + "source": "apache", + "compressible": false, + "extensions": ["apk"] + }, + "application/vnd.anki": { + "source": "iana" + }, + "application/vnd.anser-web-certificate-issue-initiation": { + "source": "iana", + "extensions": ["cii"] + }, + "application/vnd.anser-web-funds-transfer-initiation": { + "source": "apache", + "extensions": ["fti"] + }, + "application/vnd.antix.game-component": { + "source": "iana", + "extensions": ["atx"] + }, + "application/vnd.apache.thrift.binary": { + "source": "iana" + }, + "application/vnd.apache.thrift.compact": { + "source": "iana" + }, + "application/vnd.apache.thrift.json": { + "source": 
"iana" + }, + "application/vnd.api+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.apothekende.reservation+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.apple.installer+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpkg"] + }, + "application/vnd.apple.keynote": { + "source": "iana", + "extensions": ["keynote"] + }, + "application/vnd.apple.mpegurl": { + "source": "iana", + "extensions": ["m3u8"] + }, + "application/vnd.apple.numbers": { + "source": "iana", + "extensions": ["numbers"] + }, + "application/vnd.apple.pages": { + "source": "iana", + "extensions": ["pages"] + }, + "application/vnd.apple.pkpass": { + "compressible": false, + "extensions": ["pkpass"] + }, + "application/vnd.arastra.swi": { + "source": "iana" + }, + "application/vnd.aristanetworks.swi": { + "source": "iana", + "extensions": ["swi"] + }, + "application/vnd.artisan+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.artsquare": { + "source": "iana" + }, + "application/vnd.astraea-software.iota": { + "source": "iana", + "extensions": ["iota"] + }, + "application/vnd.audiograph": { + "source": "iana", + "extensions": ["aep"] + }, + "application/vnd.autopackage": { + "source": "iana" + }, + "application/vnd.avalon+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.avistar+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.balsamiq.bmml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.balsamiq.bmpr": { + "source": "iana" + }, + "application/vnd.banana-accounting": { + "source": "iana" + }, + "application/vnd.bbf.usp.msg": { + "source": "iana" + }, + "application/vnd.bbf.usp.msg+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.bekitzur-stech+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.bint.med-content": { + "source": "iana" + }, + "application/vnd.biopax.rdf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.blink-idb-value-wrapper": { + "source": "iana" + }, + "application/vnd.blueice.multipass": { + "source": "iana", + "extensions": ["mpm"] + }, + "application/vnd.bluetooth.ep.oob": { + "source": "iana" + }, + "application/vnd.bluetooth.le.oob": { + "source": "iana" + }, + "application/vnd.bmi": { + "source": "iana", + "extensions": ["bmi"] + }, + "application/vnd.businessobjects": { + "source": "iana", + "extensions": ["rep"] + }, + "application/vnd.byu.uapi+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cab-jscript": { + "source": "iana" + }, + "application/vnd.canon-cpdl": { + "source": "iana" + }, + "application/vnd.canon-lips": { + "source": "iana" + }, + "application/vnd.capasystems-pg+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cendio.thinlinc.clientconf": { + "source": "iana" + }, + "application/vnd.century-systems.tcp_stream": { + "source": "iana" + }, + "application/vnd.chemdraw+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cdxml"] + }, + "application/vnd.chess-pgn": { + "source": "iana" + }, + "application/vnd.chipnuts.karaoke-mmd": { + "source": "iana", + "extensions": ["mmd"] + }, + "application/vnd.cinderella": { + "source": "iana", + "extensions": ["cdy"] + }, + "application/vnd.cirpack.isdn-ext": { + "source": "iana" + }, + "application/vnd.citationstyles.style+xml": { + "source": "iana", + "compressible": true, + "extensions": ["csl"] + }, + 
"application/vnd.claymore": { + "source": "iana", + "extensions": ["cla"] + }, + "application/vnd.cloanto.rp9": { + "source": "iana", + "extensions": ["rp9"] + }, + "application/vnd.clonk.c4group": { + "source": "iana", + "extensions": ["c4g","c4d","c4f","c4p","c4u"] + }, + "application/vnd.cluetrust.cartomobile-config": { + "source": "iana", + "extensions": ["c11amc"] + }, + "application/vnd.cluetrust.cartomobile-config-pkg": { + "source": "iana", + "extensions": ["c11amz"] + }, + "application/vnd.coffeescript": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.document": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.document-template": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.presentation": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.presentation-template": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.spreadsheet": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.spreadsheet-template": { + "source": "iana" + }, + "application/vnd.collection+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.doc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.next+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.comicbook+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.comicbook-rar": { + "source": "iana" + }, + "application/vnd.commerce-battelle": { + "source": "iana" + }, + "application/vnd.commonspace": { + "source": "iana", + "extensions": ["csp"] + }, + "application/vnd.contact.cmsg": { + "source": "iana", + "extensions": ["cdbcmsg"] + }, + "application/vnd.coreos.ignition+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cosmocaller": { + "source": "iana", + "extensions": ["cmc"] + }, + "application/vnd.crick.clicker": { + "source": "iana", + "extensions": ["clkx"] + }, + "application/vnd.crick.clicker.keyboard": { + "source": "iana", + "extensions": ["clkk"] + }, + "application/vnd.crick.clicker.palette": { + "source": "iana", + "extensions": ["clkp"] + }, + "application/vnd.crick.clicker.template": { + "source": "iana", + "extensions": ["clkt"] + }, + "application/vnd.crick.clicker.wordbank": { + "source": "iana", + "extensions": ["clkw"] + }, + "application/vnd.criticaltools.wbs+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wbs"] + }, + "application/vnd.ctc-posml": { + "source": "iana", + "extensions": ["pml"] + }, + "application/vnd.ctct.ws+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.cups-pdf": { + "source": "iana" + }, + "application/vnd.cups-postscript": { + "source": "iana" + }, + "application/vnd.cups-ppd": { + "source": "iana", + "extensions": ["ppd"] + }, + "application/vnd.cups-raster": { + "source": "iana" + }, + "application/vnd.cups-raw": { + "source": "iana" + }, + "application/vnd.curl": { + "source": "iana" + }, + "application/vnd.curl.car": { + "source": "apache", + "extensions": ["car"] + }, + "application/vnd.curl.pcurl": { + "source": "apache", + "extensions": ["pcurl"] + }, + "application/vnd.cyan.dean.root+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.cybank": { + "source": "iana" + }, + "application/vnd.d2l.coursepackage1p0+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.dart": { + "source": "iana", + "compressible": true, + "extensions": ["dart"] + }, + "application/vnd.data-vision.rdz": { + 
"source": "iana", + "extensions": ["rdz"] + }, + "application/vnd.datapackage+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dataresource+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.debian.binary-package": { + "source": "iana" + }, + "application/vnd.dece.data": { + "source": "iana", + "extensions": ["uvf","uvvf","uvd","uvvd"] + }, + "application/vnd.dece.ttml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["uvt","uvvt"] + }, + "application/vnd.dece.unspecified": { + "source": "iana", + "extensions": ["uvx","uvvx"] + }, + "application/vnd.dece.zip": { + "source": "iana", + "extensions": ["uvz","uvvz"] + }, + "application/vnd.denovo.fcselayout-link": { + "source": "iana", + "extensions": ["fe_launch"] + }, + "application/vnd.desmume.movie": { + "source": "iana" + }, + "application/vnd.dir-bi.plate-dl-nosuffix": { + "source": "iana" + }, + "application/vnd.dm.delegation+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dna": { + "source": "iana", + "extensions": ["dna"] + }, + "application/vnd.document+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dolby.mlp": { + "source": "apache", + "extensions": ["mlp"] + }, + "application/vnd.dolby.mobile.1": { + "source": "iana" + }, + "application/vnd.dolby.mobile.2": { + "source": "iana" + }, + "application/vnd.doremir.scorecloud-binary-document": { + "source": "iana" + }, + "application/vnd.dpgraph": { + "source": "iana", + "extensions": ["dpg"] + }, + "application/vnd.dreamfactory": { + "source": "iana", + "extensions": ["dfac"] + }, + "application/vnd.drive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ds-keypoint": { + "source": "apache", + "extensions": ["kpxx"] + }, + "application/vnd.dtg.local": { + "source": "iana" + }, + "application/vnd.dtg.local.flash": { + "source": "iana" + }, + "application/vnd.dtg.local.html": { + "source": "iana" + }, + "application/vnd.dvb.ait": { + "source": "iana", + "extensions": ["ait"] + }, + "application/vnd.dvb.dvbj": { + "source": "iana" + }, + "application/vnd.dvb.esgcontainer": { + "source": "iana" + }, + "application/vnd.dvb.ipdcdftnotifaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess2": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgpdd": { + "source": "iana" + }, + "application/vnd.dvb.ipdcroaming": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-base": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-enhancement": { + "source": "iana" + }, + "application/vnd.dvb.notif-aggregate-root+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-container+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-generic+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-msglist+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-registration-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-registration-response+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-init+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.pfr": { + "source": "iana" + }, + "application/vnd.dvb.service": { + "source": "iana", + "extensions": ["svc"] + }, + "application/vnd.dxr": { + "source": "iana" + }, + 
"application/vnd.dynageo": { + "source": "iana", + "extensions": ["geo"] + }, + "application/vnd.dzr": { + "source": "iana" + }, + "application/vnd.easykaraoke.cdgdownload": { + "source": "iana" + }, + "application/vnd.ecdis-update": { + "source": "iana" + }, + "application/vnd.ecip.rlp": { + "source": "iana" + }, + "application/vnd.ecowin.chart": { + "source": "iana", + "extensions": ["mag"] + }, + "application/vnd.ecowin.filerequest": { + "source": "iana" + }, + "application/vnd.ecowin.fileupdate": { + "source": "iana" + }, + "application/vnd.ecowin.series": { + "source": "iana" + }, + "application/vnd.ecowin.seriesrequest": { + "source": "iana" + }, + "application/vnd.ecowin.seriesupdate": { + "source": "iana" + }, + "application/vnd.efi.img": { + "source": "iana" + }, + "application/vnd.efi.iso": { + "source": "iana" + }, + "application/vnd.emclient.accessrequest+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.enliven": { + "source": "iana", + "extensions": ["nml"] + }, + "application/vnd.enphase.envoy": { + "source": "iana" + }, + "application/vnd.eprints.data+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.epson.esf": { + "source": "iana", + "extensions": ["esf"] + }, + "application/vnd.epson.msf": { + "source": "iana", + "extensions": ["msf"] + }, + "application/vnd.epson.quickanime": { + "source": "iana", + "extensions": ["qam"] + }, + "application/vnd.epson.salt": { + "source": "iana", + "extensions": ["slt"] + }, + "application/vnd.epson.ssf": { + "source": "iana", + "extensions": ["ssf"] + }, + "application/vnd.ericsson.quickcall": { + "source": "iana" + }, + "application/vnd.espass-espass+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.eszigno3+xml": { + "source": "iana", + "compressible": true, + "extensions": ["es3","et3"] + }, + "application/vnd.etsi.aoc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.asic-e+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.etsi.asic-s+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.etsi.cug+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvcommand+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvdiscovery+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-bc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-cod+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-npvr+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvservice+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsync+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvueprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.mcid+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.mheg5": { + "source": "iana" + }, + "application/vnd.etsi.overload-control-policy-dataset+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.pstn+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.sci+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.simservs+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.etsi.timestamp-token": { + "source": "iana" + }, + "application/vnd.etsi.tsl+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.tsl.der": { + "source": "iana" + }, + "application/vnd.eudora.data": { + "source": "iana" + }, + "application/vnd.evolv.ecig.profile": { + "source": "iana" + }, + "application/vnd.evolv.ecig.settings": { + "source": "iana" + }, + "application/vnd.evolv.ecig.theme": { + "source": "iana" + }, + "application/vnd.exstream-empower+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.exstream-package": { + "source": "iana" + }, + "application/vnd.ezpix-album": { + "source": "iana", + "extensions": ["ez2"] + }, + "application/vnd.ezpix-package": { + "source": "iana", + "extensions": ["ez3"] + }, + "application/vnd.f-secure.mobile": { + "source": "iana" + }, + "application/vnd.fastcopy-disk-image": { + "source": "iana" + }, + "application/vnd.fdf": { + "source": "iana", + "extensions": ["fdf"] + }, + "application/vnd.fdsn.mseed": { + "source": "iana", + "extensions": ["mseed"] + }, + "application/vnd.fdsn.seed": { + "source": "iana", + "extensions": ["seed","dataless"] + }, + "application/vnd.ffsns": { + "source": "iana" + }, + "application/vnd.filmit.zfc": { + "source": "iana" + }, + "application/vnd.fints": { + "source": "iana" + }, + "application/vnd.firemonkeys.cloudcell": { + "source": "iana" + }, + "application/vnd.flographit": { + "source": "iana", + "extensions": ["gph"] + }, + "application/vnd.fluxtime.clip": { + "source": "iana", + "extensions": ["ftc"] + }, + "application/vnd.font-fontforge-sfd": { + "source": "iana" + }, + "application/vnd.framemaker": { + "source": "iana", + "extensions": ["fm","frame","maker","book"] + }, + "application/vnd.frogans.fnc": { + "source": "iana", + "extensions": ["fnc"] + }, + "application/vnd.frogans.ltf": { + "source": "iana", + "extensions": ["ltf"] + }, + "application/vnd.fsc.weblaunch": { + "source": "iana", + "extensions": ["fsc"] + }, + "application/vnd.fujitsu.oasys": { + "source": "iana", + "extensions": ["oas"] + }, + "application/vnd.fujitsu.oasys2": { + "source": "iana", + "extensions": ["oa2"] + }, + "application/vnd.fujitsu.oasys3": { + "source": "iana", + "extensions": ["oa3"] + }, + "application/vnd.fujitsu.oasysgp": { + "source": "iana", + "extensions": ["fg5"] + }, + "application/vnd.fujitsu.oasysprs": { + "source": "iana", + "extensions": ["bh2"] + }, + "application/vnd.fujixerox.art-ex": { + "source": "iana" + }, + "application/vnd.fujixerox.art4": { + "source": "iana" + }, + "application/vnd.fujixerox.ddd": { + "source": "iana", + "extensions": ["ddd"] + }, + "application/vnd.fujixerox.docuworks": { + "source": "iana", + "extensions": ["xdw"] + }, + "application/vnd.fujixerox.docuworks.binder": { + "source": "iana", + "extensions": ["xbd"] + }, + "application/vnd.fujixerox.docuworks.container": { + "source": "iana" + }, + "application/vnd.fujixerox.hbpl": { + "source": "iana" + }, + "application/vnd.fut-misnet": { + "source": "iana" + }, + "application/vnd.futoin+cbor": { + "source": "iana" + }, + "application/vnd.futoin+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.fuzzysheet": { + "source": "iana", + "extensions": ["fzs"] + }, + "application/vnd.genomatix.tuxedo": { + "source": "iana", + "extensions": ["txd"] + }, + "application/vnd.geo+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.geocube+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.geogebra.file": 
{ + "source": "iana", + "extensions": ["ggb"] + }, + "application/vnd.geogebra.tool": { + "source": "iana", + "extensions": ["ggt"] + }, + "application/vnd.geometry-explorer": { + "source": "iana", + "extensions": ["gex","gre"] + }, + "application/vnd.geonext": { + "source": "iana", + "extensions": ["gxt"] + }, + "application/vnd.geoplan": { + "source": "iana", + "extensions": ["g2w"] + }, + "application/vnd.geospace": { + "source": "iana", + "extensions": ["g3w"] + }, + "application/vnd.gerber": { + "source": "iana" + }, + "application/vnd.globalplatform.card-content-mgt": { + "source": "iana" + }, + "application/vnd.globalplatform.card-content-mgt-response": { + "source": "iana" + }, + "application/vnd.gmx": { + "source": "iana", + "extensions": ["gmx"] + }, + "application/vnd.google-apps.document": { + "compressible": false, + "extensions": ["gdoc"] + }, + "application/vnd.google-apps.presentation": { + "compressible": false, + "extensions": ["gslides"] + }, + "application/vnd.google-apps.spreadsheet": { + "compressible": false, + "extensions": ["gsheet"] + }, + "application/vnd.google-earth.kml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["kml"] + }, + "application/vnd.google-earth.kmz": { + "source": "iana", + "compressible": false, + "extensions": ["kmz"] + }, + "application/vnd.gov.sk.e-form+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.gov.sk.e-form+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.gov.sk.xmldatacontainer+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.grafeq": { + "source": "iana", + "extensions": ["gqf","gqs"] + }, + "application/vnd.gridmp": { + "source": "iana" + }, + "application/vnd.groove-account": { + "source": "iana", + "extensions": ["gac"] + }, + "application/vnd.groove-help": { + "source": "iana", + "extensions": ["ghf"] + }, + "application/vnd.groove-identity-message": { + "source": "iana", + "extensions": ["gim"] + }, + "application/vnd.groove-injector": { + "source": "iana", + "extensions": ["grv"] + }, + "application/vnd.groove-tool-message": { + "source": "iana", + "extensions": ["gtm"] + }, + "application/vnd.groove-tool-template": { + "source": "iana", + "extensions": ["tpl"] + }, + "application/vnd.groove-vcard": { + "source": "iana", + "extensions": ["vcg"] + }, + "application/vnd.hal+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hal+xml": { + "source": "iana", + "compressible": true, + "extensions": ["hal"] + }, + "application/vnd.handheld-entertainment+xml": { + "source": "iana", + "compressible": true, + "extensions": ["zmm"] + }, + "application/vnd.hbci": { + "source": "iana", + "extensions": ["hbci"] + }, + "application/vnd.hc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hcl-bireports": { + "source": "iana" + }, + "application/vnd.hdt": { + "source": "iana" + }, + "application/vnd.heroku+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hhe.lesson-player": { + "source": "iana", + "extensions": ["les"] + }, + "application/vnd.hp-hpgl": { + "source": "iana", + "extensions": ["hpgl"] + }, + "application/vnd.hp-hpid": { + "source": "iana", + "extensions": ["hpid"] + }, + "application/vnd.hp-hps": { + "source": "iana", + "extensions": ["hps"] + }, + "application/vnd.hp-jlyt": { + "source": "iana", + "extensions": ["jlt"] + }, + "application/vnd.hp-pcl": { + "source": "iana", + "extensions": ["pcl"] + }, + "application/vnd.hp-pclxl": { + "source": "iana", 
+ "extensions": ["pclxl"] + }, + "application/vnd.httphone": { + "source": "iana" + }, + "application/vnd.hydrostatix.sof-data": { + "source": "iana", + "extensions": ["sfd-hdstx"] + }, + "application/vnd.hyper+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hyper-item+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hyperdrive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hzn-3d-crossword": { + "source": "iana" + }, + "application/vnd.ibm.afplinedata": { + "source": "iana" + }, + "application/vnd.ibm.electronic-media": { + "source": "iana" + }, + "application/vnd.ibm.minipay": { + "source": "iana", + "extensions": ["mpy"] + }, + "application/vnd.ibm.modcap": { + "source": "iana", + "extensions": ["afp","listafp","list3820"] + }, + "application/vnd.ibm.rights-management": { + "source": "iana", + "extensions": ["irm"] + }, + "application/vnd.ibm.secure-container": { + "source": "iana", + "extensions": ["sc"] + }, + "application/vnd.iccprofile": { + "source": "iana", + "extensions": ["icc","icm"] + }, + "application/vnd.ieee.1905": { + "source": "iana" + }, + "application/vnd.igloader": { + "source": "iana", + "extensions": ["igl"] + }, + "application/vnd.imagemeter.folder+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.imagemeter.image+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.immervision-ivp": { + "source": "iana", + "extensions": ["ivp"] + }, + "application/vnd.immervision-ivu": { + "source": "iana", + "extensions": ["ivu"] + }, + "application/vnd.ims.imsccv1p1": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p2": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p3": { + "source": "iana" + }, + "application/vnd.ims.lis.v2.result+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolconsumerprofile+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy.id+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolsettings+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolsettings.simple+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.informedcontrol.rms+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.informix-visionary": { + "source": "iana" + }, + "application/vnd.infotech.project": { + "source": "iana" + }, + "application/vnd.infotech.project+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.innopath.wamp.notification": { + "source": "iana" + }, + "application/vnd.insors.igm": { + "source": "iana", + "extensions": ["igm"] + }, + "application/vnd.intercon.formnet": { + "source": "iana", + "extensions": ["xpw","xpx"] + }, + "application/vnd.intergeo": { + "source": "iana", + "extensions": ["i2g"] + }, + "application/vnd.intertrust.digibox": { + "source": "iana" + }, + "application/vnd.intertrust.nncp": { + "source": "iana" + }, + "application/vnd.intu.qbo": { + "source": "iana", + "extensions": ["qbo"] + }, + "application/vnd.intu.qfx": { + "source": "iana", + "extensions": ["qfx"] + }, + "application/vnd.iptc.g2.catalogitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.conceptitem+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.iptc.g2.knowledgeitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.newsitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.newsmessage+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.packageitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.planningitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ipunplugged.rcprofile": { + "source": "iana", + "extensions": ["rcprofile"] + }, + "application/vnd.irepository.package+xml": { + "source": "iana", + "compressible": true, + "extensions": ["irp"] + }, + "application/vnd.is-xpr": { + "source": "iana", + "extensions": ["xpr"] + }, + "application/vnd.isac.fcs": { + "source": "iana", + "extensions": ["fcs"] + }, + "application/vnd.jam": { + "source": "iana", + "extensions": ["jam"] + }, + "application/vnd.japannet-directory-service": { + "source": "iana" + }, + "application/vnd.japannet-jpnstore-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-payment-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-registration": { + "source": "iana" + }, + "application/vnd.japannet-registration-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-setstore-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-verification": { + "source": "iana" + }, + "application/vnd.japannet-verification-wakeup": { + "source": "iana" + }, + "application/vnd.jcp.javame.midlet-rms": { + "source": "iana", + "extensions": ["rms"] + }, + "application/vnd.jisp": { + "source": "iana", + "extensions": ["jisp"] + }, + "application/vnd.joost.joda-archive": { + "source": "iana", + "extensions": ["joda"] + }, + "application/vnd.jsk.isdn-ngn": { + "source": "iana" + }, + "application/vnd.kahootz": { + "source": "iana", + "extensions": ["ktz","ktr"] + }, + "application/vnd.kde.karbon": { + "source": "iana", + "extensions": ["karbon"] + }, + "application/vnd.kde.kchart": { + "source": "iana", + "extensions": ["chrt"] + }, + "application/vnd.kde.kformula": { + "source": "iana", + "extensions": ["kfo"] + }, + "application/vnd.kde.kivio": { + "source": "iana", + "extensions": ["flw"] + }, + "application/vnd.kde.kontour": { + "source": "iana", + "extensions": ["kon"] + }, + "application/vnd.kde.kpresenter": { + "source": "iana", + "extensions": ["kpr","kpt"] + }, + "application/vnd.kde.kspread": { + "source": "iana", + "extensions": ["ksp"] + }, + "application/vnd.kde.kword": { + "source": "iana", + "extensions": ["kwd","kwt"] + }, + "application/vnd.kenameaapp": { + "source": "iana", + "extensions": ["htke"] + }, + "application/vnd.kidspiration": { + "source": "iana", + "extensions": ["kia"] + }, + "application/vnd.kinar": { + "source": "iana", + "extensions": ["kne","knp"] + }, + "application/vnd.koan": { + "source": "iana", + "extensions": ["skp","skd","skt","skm"] + }, + "application/vnd.kodak-descriptor": { + "source": "iana", + "extensions": ["sse"] + }, + "application/vnd.las.las+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.las.las+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lasxml"] + }, + "application/vnd.leap+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.liberty-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.llamagraphics.life-balance.desktop": { + "source": "iana", + "extensions": ["lbd"] + }, + 
"application/vnd.llamagraphics.life-balance.exchange+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lbe"] + }, + "application/vnd.lotus-1-2-3": { + "source": "iana", + "extensions": ["123"] + }, + "application/vnd.lotus-approach": { + "source": "iana", + "extensions": ["apr"] + }, + "application/vnd.lotus-freelance": { + "source": "iana", + "extensions": ["pre"] + }, + "application/vnd.lotus-notes": { + "source": "iana", + "extensions": ["nsf"] + }, + "application/vnd.lotus-organizer": { + "source": "iana", + "extensions": ["org"] + }, + "application/vnd.lotus-screencam": { + "source": "iana", + "extensions": ["scm"] + }, + "application/vnd.lotus-wordpro": { + "source": "iana", + "extensions": ["lwp"] + }, + "application/vnd.macports.portpkg": { + "source": "iana", + "extensions": ["portpkg"] + }, + "application/vnd.mapbox-vector-tile": { + "source": "iana" + }, + "application/vnd.marlin.drm.actiontoken+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.conftoken+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.license+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.mdcf": { + "source": "iana" + }, + "application/vnd.mason+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.maxmind.maxmind-db": { + "source": "iana" + }, + "application/vnd.mcd": { + "source": "iana", + "extensions": ["mcd"] + }, + "application/vnd.medcalcdata": { + "source": "iana", + "extensions": ["mc1"] + }, + "application/vnd.mediastation.cdkey": { + "source": "iana", + "extensions": ["cdkey"] + }, + "application/vnd.meridian-slingshot": { + "source": "iana" + }, + "application/vnd.mfer": { + "source": "iana", + "extensions": ["mwf"] + }, + "application/vnd.mfmp": { + "source": "iana", + "extensions": ["mfm"] + }, + "application/vnd.micro+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.micrografx.flo": { + "source": "iana", + "extensions": ["flo"] + }, + "application/vnd.micrografx.igx": { + "source": "iana", + "extensions": ["igx"] + }, + "application/vnd.microsoft.portable-executable": { + "source": "iana" + }, + "application/vnd.microsoft.windows.thumbnail-cache": { + "source": "iana" + }, + "application/vnd.miele+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.mif": { + "source": "iana", + "extensions": ["mif"] + }, + "application/vnd.minisoft-hp3000-save": { + "source": "iana" + }, + "application/vnd.mitsubishi.misty-guard.trustweb": { + "source": "iana" + }, + "application/vnd.mobius.daf": { + "source": "iana", + "extensions": ["daf"] + }, + "application/vnd.mobius.dis": { + "source": "iana", + "extensions": ["dis"] + }, + "application/vnd.mobius.mbk": { + "source": "iana", + "extensions": ["mbk"] + }, + "application/vnd.mobius.mqy": { + "source": "iana", + "extensions": ["mqy"] + }, + "application/vnd.mobius.msl": { + "source": "iana", + "extensions": ["msl"] + }, + "application/vnd.mobius.plc": { + "source": "iana", + "extensions": ["plc"] + }, + "application/vnd.mobius.txf": { + "source": "iana", + "extensions": ["txf"] + }, + "application/vnd.mophun.application": { + "source": "iana", + "extensions": ["mpn"] + }, + "application/vnd.mophun.certificate": { + "source": "iana", + "extensions": ["mpc"] + }, + "application/vnd.motorola.flexsuite": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.adsi": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.fis": { + "source": "iana" 
+ }, + "application/vnd.motorola.flexsuite.gotap": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.kmr": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.ttc": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.wem": { + "source": "iana" + }, + "application/vnd.motorola.iprm": { + "source": "iana" + }, + "application/vnd.mozilla.xul+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xul"] + }, + "application/vnd.ms-3mfdocument": { + "source": "iana" + }, + "application/vnd.ms-artgalry": { + "source": "iana", + "extensions": ["cil"] + }, + "application/vnd.ms-asf": { + "source": "iana" + }, + "application/vnd.ms-cab-compressed": { + "source": "iana", + "extensions": ["cab"] + }, + "application/vnd.ms-color.iccprofile": { + "source": "apache" + }, + "application/vnd.ms-excel": { + "source": "iana", + "compressible": false, + "extensions": ["xls","xlm","xla","xlc","xlt","xlw"] + }, + "application/vnd.ms-excel.addin.macroenabled.12": { + "source": "iana", + "extensions": ["xlam"] + }, + "application/vnd.ms-excel.sheet.binary.macroenabled.12": { + "source": "iana", + "extensions": ["xlsb"] + }, + "application/vnd.ms-excel.sheet.macroenabled.12": { + "source": "iana", + "extensions": ["xlsm"] + }, + "application/vnd.ms-excel.template.macroenabled.12": { + "source": "iana", + "extensions": ["xltm"] + }, + "application/vnd.ms-fontobject": { + "source": "iana", + "compressible": true, + "extensions": ["eot"] + }, + "application/vnd.ms-htmlhelp": { + "source": "iana", + "extensions": ["chm"] + }, + "application/vnd.ms-ims": { + "source": "iana", + "extensions": ["ims"] + }, + "application/vnd.ms-lrm": { + "source": "iana", + "extensions": ["lrm"] + }, + "application/vnd.ms-office.activex+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-officetheme": { + "source": "iana", + "extensions": ["thmx"] + }, + "application/vnd.ms-opentype": { + "source": "apache", + "compressible": true + }, + "application/vnd.ms-outlook": { + "compressible": false, + "extensions": ["msg"] + }, + "application/vnd.ms-package.obfuscated-opentype": { + "source": "apache" + }, + "application/vnd.ms-pki.seccat": { + "source": "apache", + "extensions": ["cat"] + }, + "application/vnd.ms-pki.stl": { + "source": "apache", + "extensions": ["stl"] + }, + "application/vnd.ms-playready.initiator+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-powerpoint": { + "source": "iana", + "compressible": false, + "extensions": ["ppt","pps","pot"] + }, + "application/vnd.ms-powerpoint.addin.macroenabled.12": { + "source": "iana", + "extensions": ["ppam"] + }, + "application/vnd.ms-powerpoint.presentation.macroenabled.12": { + "source": "iana", + "extensions": ["pptm"] + }, + "application/vnd.ms-powerpoint.slide.macroenabled.12": { + "source": "iana", + "extensions": ["sldm"] + }, + "application/vnd.ms-powerpoint.slideshow.macroenabled.12": { + "source": "iana", + "extensions": ["ppsm"] + }, + "application/vnd.ms-powerpoint.template.macroenabled.12": { + "source": "iana", + "extensions": ["potm"] + }, + "application/vnd.ms-printdevicecapabilities+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-printing.printticket+xml": { + "source": "apache", + "compressible": true + }, + "application/vnd.ms-printschematicket+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-project": { + "source": "iana", + "extensions": ["mpp","mpt"] + }, + "application/vnd.ms-tnef": { + "source": 
"iana" + }, + "application/vnd.ms-windows.devicepairing": { + "source": "iana" + }, + "application/vnd.ms-windows.nwprinting.oob": { + "source": "iana" + }, + "application/vnd.ms-windows.printerpairing": { + "source": "iana" + }, + "application/vnd.ms-windows.wsd.oob": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-resp": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-resp": { + "source": "iana" + }, + "application/vnd.ms-word.document.macroenabled.12": { + "source": "iana", + "extensions": ["docm"] + }, + "application/vnd.ms-word.template.macroenabled.12": { + "source": "iana", + "extensions": ["dotm"] + }, + "application/vnd.ms-works": { + "source": "iana", + "extensions": ["wps","wks","wcm","wdb"] + }, + "application/vnd.ms-wpl": { + "source": "iana", + "extensions": ["wpl"] + }, + "application/vnd.ms-xpsdocument": { + "source": "iana", + "compressible": false, + "extensions": ["xps"] + }, + "application/vnd.msa-disk-image": { + "source": "iana" + }, + "application/vnd.mseq": { + "source": "iana", + "extensions": ["mseq"] + }, + "application/vnd.msign": { + "source": "iana" + }, + "application/vnd.multiad.creator": { + "source": "iana" + }, + "application/vnd.multiad.creator.cif": { + "source": "iana" + }, + "application/vnd.music-niff": { + "source": "iana" + }, + "application/vnd.musician": { + "source": "iana", + "extensions": ["mus"] + }, + "application/vnd.muvee.style": { + "source": "iana", + "extensions": ["msty"] + }, + "application/vnd.mynfc": { + "source": "iana", + "extensions": ["taglet"] + }, + "application/vnd.ncd.control": { + "source": "iana" + }, + "application/vnd.ncd.reference": { + "source": "iana" + }, + "application/vnd.nearst.inv+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.nervana": { + "source": "iana" + }, + "application/vnd.netfpx": { + "source": "iana" + }, + "application/vnd.neurolanguage.nlu": { + "source": "iana", + "extensions": ["nlu"] + }, + "application/vnd.nimn": { + "source": "iana" + }, + "application/vnd.nintendo.nitro.rom": { + "source": "iana" + }, + "application/vnd.nintendo.snes.rom": { + "source": "iana" + }, + "application/vnd.nitf": { + "source": "iana", + "extensions": ["ntf","nitf"] + }, + "application/vnd.noblenet-directory": { + "source": "iana", + "extensions": ["nnd"] + }, + "application/vnd.noblenet-sealer": { + "source": "iana", + "extensions": ["nns"] + }, + "application/vnd.noblenet-web": { + "source": "iana", + "extensions": ["nnw"] + }, + "application/vnd.nokia.catalogs": { + "source": "iana" + }, + "application/vnd.nokia.conml+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.conml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.iptv.config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.isds-radio-presets": { + "source": "iana" + }, + "application/vnd.nokia.landmark+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.landmark+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.landmarkcollection+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.n-gage.ac+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.n-gage.data": { + "source": "iana", + "extensions": ["ngdat"] + }, + "application/vnd.nokia.n-gage.symbian.install": { + "source": "iana", + "extensions": ["n-gage"] 
+ }, + "application/vnd.nokia.ncd": { + "source": "iana" + }, + "application/vnd.nokia.pcd+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.pcd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.radio-preset": { + "source": "iana", + "extensions": ["rpst"] + }, + "application/vnd.nokia.radio-presets": { + "source": "iana", + "extensions": ["rpss"] + }, + "application/vnd.novadigm.edm": { + "source": "iana", + "extensions": ["edm"] + }, + "application/vnd.novadigm.edx": { + "source": "iana", + "extensions": ["edx"] + }, + "application/vnd.novadigm.ext": { + "source": "iana", + "extensions": ["ext"] + }, + "application/vnd.ntt-local.content-share": { + "source": "iana" + }, + "application/vnd.ntt-local.file-transfer": { + "source": "iana" + }, + "application/vnd.ntt-local.ogw_remote-access": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_remote": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_tcp_stream": { + "source": "iana" + }, + "application/vnd.oasis.opendocument.chart": { + "source": "iana", + "extensions": ["odc"] + }, + "application/vnd.oasis.opendocument.chart-template": { + "source": "iana", + "extensions": ["otc"] + }, + "application/vnd.oasis.opendocument.database": { + "source": "iana", + "extensions": ["odb"] + }, + "application/vnd.oasis.opendocument.formula": { + "source": "iana", + "extensions": ["odf"] + }, + "application/vnd.oasis.opendocument.formula-template": { + "source": "iana", + "extensions": ["odft"] + }, + "application/vnd.oasis.opendocument.graphics": { + "source": "iana", + "compressible": false, + "extensions": ["odg"] + }, + "application/vnd.oasis.opendocument.graphics-template": { + "source": "iana", + "extensions": ["otg"] + }, + "application/vnd.oasis.opendocument.image": { + "source": "iana", + "extensions": ["odi"] + }, + "application/vnd.oasis.opendocument.image-template": { + "source": "iana", + "extensions": ["oti"] + }, + "application/vnd.oasis.opendocument.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["odp"] + }, + "application/vnd.oasis.opendocument.presentation-template": { + "source": "iana", + "extensions": ["otp"] + }, + "application/vnd.oasis.opendocument.spreadsheet": { + "source": "iana", + "compressible": false, + "extensions": ["ods"] + }, + "application/vnd.oasis.opendocument.spreadsheet-template": { + "source": "iana", + "extensions": ["ots"] + }, + "application/vnd.oasis.opendocument.text": { + "source": "iana", + "compressible": false, + "extensions": ["odt"] + }, + "application/vnd.oasis.opendocument.text-master": { + "source": "iana", + "extensions": ["odm"] + }, + "application/vnd.oasis.opendocument.text-template": { + "source": "iana", + "extensions": ["ott"] + }, + "application/vnd.oasis.opendocument.text-web": { + "source": "iana", + "extensions": ["oth"] + }, + "application/vnd.obn": { + "source": "iana" + }, + "application/vnd.ocf+cbor": { + "source": "iana" + }, + "application/vnd.oftn.l10n+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.contentaccessdownload+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.contentaccessstreaming+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.cspg-hexbinary": { + "source": "iana" + }, + "application/vnd.oipf.dae.svg+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.dae.xhtml+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.oipf.mippvcontrolmessage+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.pae.gem": { + "source": "iana" + }, + "application/vnd.oipf.spdiscovery+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.spdlist+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.ueprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.userprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.olpc-sugar": { + "source": "iana", + "extensions": ["xo"] + }, + "application/vnd.oma-scws-config": { + "source": "iana" + }, + "application/vnd.oma-scws-http-request": { + "source": "iana" + }, + "application/vnd.oma-scws-http-response": { + "source": "iana" + }, + "application/vnd.oma.bcast.associated-procedure-parameter+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.drm-trigger+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.imd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.ltkm": { + "source": "iana" + }, + "application/vnd.oma.bcast.notification+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.provisioningtrigger": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgboot": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgdd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.sgdu": { + "source": "iana" + }, + "application/vnd.oma.bcast.simple-symbol-container": { + "source": "iana" + }, + "application/vnd.oma.bcast.smartcard-trigger+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.sprov+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.stkm": { + "source": "iana" + }, + "application/vnd.oma.cab-address-book+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-feature-handler+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-pcc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-subs-invite+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-user-prefs+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.dcd": { + "source": "iana" + }, + "application/vnd.oma.dcdc": { + "source": "iana" + }, + "application/vnd.oma.dd2+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dd2"] + }, + "application/vnd.oma.drm.risd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.group-usage-list+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.lwm2m+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.lwm2m+tlv": { + "source": "iana" + }, + "application/vnd.oma.pal+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.detailed-progress-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.final-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.groups+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.invocation-descriptor+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.optimized-progress-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.push": { + "source": 
"iana" + }, + "application/vnd.oma.scidm.messages+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.xcap-directory+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.omads-email+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.omads-file+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.omads-folder+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.omaloc-supl-init": { + "source": "iana" + }, + "application/vnd.onepager": { + "source": "iana" + }, + "application/vnd.onepagertamp": { + "source": "iana" + }, + "application/vnd.onepagertamx": { + "source": "iana" + }, + "application/vnd.onepagertat": { + "source": "iana" + }, + "application/vnd.onepagertatp": { + "source": "iana" + }, + "application/vnd.onepagertatx": { + "source": "iana" + }, + "application/vnd.openblox.game+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openblox.game-binary": { + "source": "iana" + }, + "application/vnd.openeye.oeb": { + "source": "iana" + }, + "application/vnd.openofficeorg.extension": { + "source": "apache", + "extensions": ["oxt"] + }, + "application/vnd.openstreetmap.data+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.custom-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.customxmlproperties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawing+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.chart+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.extended-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["pptx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.openxmlformats-officedocument.presentationml.presprops+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide": { + "source": "iana", + "extensions": ["sldx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow": { + "source": "iana", + "extensions": ["ppsx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.tags+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.template": { + "source": "iana", + "extensions": ["potx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.connections+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.dialogsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": { + "source": "iana", + "compressible": false, + "extensions": ["xlsx"] + }, 
+ "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template": { + "source": "iana", + "extensions": ["xltx"] + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.theme+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.themeoverride+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.vmldrawing": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document": { + "source": "iana", + "compressible": false, + "extensions": ["docx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.numbering+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template": { + "source": "iana", + "extensions": ["dotx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.openxmlformats-package.core-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.relationships+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oracle.resource+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.orange.indata": { + "source": "iana" + }, + "application/vnd.osa.netdeploy": { + "source": "iana" + }, + "application/vnd.osgeo.mapguide.package": { + "source": "iana", + "extensions": ["mgp"] + }, + "application/vnd.osgi.bundle": { + "source": "iana" + }, + "application/vnd.osgi.dp": { + "source": "iana", + "extensions": ["dp"] + }, + "application/vnd.osgi.subsystem": { + "source": "iana", + "extensions": ["esa"] + }, + "application/vnd.otps.ct-kip+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oxli.countgraph": { + "source": "iana" + }, + "application/vnd.pagerduty+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.palm": { + "source": "iana", + "extensions": ["pdb","pqa","oprc"] + }, + "application/vnd.panoply": { + "source": "iana" + }, + "application/vnd.paos.xml": { + "source": "iana" + }, + "application/vnd.patentdive": { + "source": "iana" + }, + "application/vnd.patientecommsdoc": { + "source": "iana" + }, + "application/vnd.pawaafile": { + "source": "iana", + "extensions": ["paw"] + }, + "application/vnd.pcos": { + "source": "iana" + }, + "application/vnd.pg.format": { + "source": "iana", + "extensions": ["str"] + }, + "application/vnd.pg.osasli": { + "source": "iana", + "extensions": ["ei6"] + }, + "application/vnd.piaccess.application-licence": { + "source": "iana" + }, + "application/vnd.picsel": { + "source": "iana", + "extensions": ["efif"] + }, + "application/vnd.pmi.widget": { + "source": "iana", + "extensions": ["wg"] + }, + "application/vnd.poc.group-advertisement+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.pocketlearn": { + "source": "iana", + "extensions": ["plf"] + }, + "application/vnd.powerbuilder6": { + "source": "iana", + "extensions": ["pbd"] + }, + "application/vnd.powerbuilder6-s": { + "source": "iana" + }, + "application/vnd.powerbuilder7": { + "source": "iana" + }, + "application/vnd.powerbuilder7-s": { + "source": "iana" + }, + "application/vnd.powerbuilder75": { + "source": "iana" + }, + "application/vnd.powerbuilder75-s": { + "source": "iana" + }, + "application/vnd.preminet": { + "source": "iana" + }, + "application/vnd.previewsystems.box": { + "source": "iana", + "extensions": ["box"] + }, + "application/vnd.proteus.magazine": { + "source": "iana", + "extensions": ["mgz"] + }, + "application/vnd.psfs": { + "source": "iana" + }, + "application/vnd.publishare-delta-tree": { + "source": "iana", + "extensions": ["qps"] + }, + "application/vnd.pvi.ptid1": { + "source": "iana", + "extensions": ["ptid"] + }, + "application/vnd.pwg-multiplexed": { + "source": "iana" + }, + "application/vnd.pwg-xhtml-print+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.qualcomm.brew-app-res": { + "source": "iana" + }, + "application/vnd.quarantainenet": { + "source": "iana" + }, + "application/vnd.quark.quarkxpress": { + "source": "iana", + "extensions": ["qxd","qxt","qwd","qwt","qxl","qxb"] + }, + "application/vnd.quobject-quoxdocument": { + "source": "iana" + }, + "application/vnd.radisys.moml+xml": { + "source": 
"iana", + "compressible": true + }, + "application/vnd.radisys.msml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-conf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-conn+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-dialog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-stream+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-conf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-base+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-fax-detect+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-fax-sendrecv+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-group+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-speech+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-transform+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.rainstor.data": { + "source": "iana" + }, + "application/vnd.rapid": { + "source": "iana" + }, + "application/vnd.rar": { + "source": "iana" + }, + "application/vnd.realvnc.bed": { + "source": "iana", + "extensions": ["bed"] + }, + "application/vnd.recordare.musicxml": { + "source": "iana", + "extensions": ["mxl"] + }, + "application/vnd.recordare.musicxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["musicxml"] + }, + "application/vnd.renlearn.rlprint": { + "source": "iana" + }, + "application/vnd.restful+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.rig.cryptonote": { + "source": "iana", + "extensions": ["cryptonote"] + }, + "application/vnd.rim.cod": { + "source": "apache", + "extensions": ["cod"] + }, + "application/vnd.rn-realmedia": { + "source": "apache", + "extensions": ["rm"] + }, + "application/vnd.rn-realmedia-vbr": { + "source": "apache", + "extensions": ["rmvb"] + }, + "application/vnd.route66.link66+xml": { + "source": "iana", + "compressible": true, + "extensions": ["link66"] + }, + "application/vnd.rs-274x": { + "source": "iana" + }, + "application/vnd.ruckus.download": { + "source": "iana" + }, + "application/vnd.s3sms": { + "source": "iana" + }, + "application/vnd.sailingtracker.track": { + "source": "iana", + "extensions": ["st"] + }, + "application/vnd.sbm.cid": { + "source": "iana" + }, + "application/vnd.sbm.mid2": { + "source": "iana" + }, + "application/vnd.scribus": { + "source": "iana" + }, + "application/vnd.sealed.3df": { + "source": "iana" + }, + "application/vnd.sealed.csf": { + "source": "iana" + }, + "application/vnd.sealed.doc": { + "source": "iana" + }, + "application/vnd.sealed.eml": { + "source": "iana" + }, + "application/vnd.sealed.mht": { + "source": "iana" + }, + "application/vnd.sealed.net": { + "source": "iana" + }, + "application/vnd.sealed.ppt": { + "source": "iana" + }, + "application/vnd.sealed.tiff": { + "source": "iana" + }, + "application/vnd.sealed.xls": { + "source": "iana" + }, + "application/vnd.sealedmedia.softseal.html": { + "source": "iana" + }, + 
"application/vnd.sealedmedia.softseal.pdf": { + "source": "iana" + }, + "application/vnd.seemail": { + "source": "iana", + "extensions": ["see"] + }, + "application/vnd.sema": { + "source": "iana", + "extensions": ["sema"] + }, + "application/vnd.semd": { + "source": "iana", + "extensions": ["semd"] + }, + "application/vnd.semf": { + "source": "iana", + "extensions": ["semf"] + }, + "application/vnd.shana.informed.formdata": { + "source": "iana", + "extensions": ["ifm"] + }, + "application/vnd.shana.informed.formtemplate": { + "source": "iana", + "extensions": ["itp"] + }, + "application/vnd.shana.informed.interchange": { + "source": "iana", + "extensions": ["iif"] + }, + "application/vnd.shana.informed.package": { + "source": "iana", + "extensions": ["ipk"] + }, + "application/vnd.shootproof+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.sigrok.session": { + "source": "iana" + }, + "application/vnd.simtech-mindmapper": { + "source": "iana", + "extensions": ["twd","twds"] + }, + "application/vnd.siren+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.smaf": { + "source": "iana", + "extensions": ["mmf"] + }, + "application/vnd.smart.notebook": { + "source": "iana" + }, + "application/vnd.smart.teacher": { + "source": "iana", + "extensions": ["teacher"] + }, + "application/vnd.software602.filler.form+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.software602.filler.form-xml-zip": { + "source": "iana" + }, + "application/vnd.solent.sdkm+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sdkm","sdkd"] + }, + "application/vnd.spotfire.dxp": { + "source": "iana", + "extensions": ["dxp"] + }, + "application/vnd.spotfire.sfs": { + "source": "iana", + "extensions": ["sfs"] + }, + "application/vnd.sqlite3": { + "source": "iana" + }, + "application/vnd.sss-cod": { + "source": "iana" + }, + "application/vnd.sss-dtf": { + "source": "iana" + }, + "application/vnd.sss-ntf": { + "source": "iana" + }, + "application/vnd.stardivision.calc": { + "source": "apache", + "extensions": ["sdc"] + }, + "application/vnd.stardivision.draw": { + "source": "apache", + "extensions": ["sda"] + }, + "application/vnd.stardivision.impress": { + "source": "apache", + "extensions": ["sdd"] + }, + "application/vnd.stardivision.math": { + "source": "apache", + "extensions": ["smf"] + }, + "application/vnd.stardivision.writer": { + "source": "apache", + "extensions": ["sdw","vor"] + }, + "application/vnd.stardivision.writer-global": { + "source": "apache", + "extensions": ["sgl"] + }, + "application/vnd.stepmania.package": { + "source": "iana", + "extensions": ["smzip"] + }, + "application/vnd.stepmania.stepchart": { + "source": "iana", + "extensions": ["sm"] + }, + "application/vnd.street-stream": { + "source": "iana" + }, + "application/vnd.sun.wadl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wadl"] + }, + "application/vnd.sun.xml.calc": { + "source": "apache", + "extensions": ["sxc"] + }, + "application/vnd.sun.xml.calc.template": { + "source": "apache", + "extensions": ["stc"] + }, + "application/vnd.sun.xml.draw": { + "source": "apache", + "extensions": ["sxd"] + }, + "application/vnd.sun.xml.draw.template": { + "source": "apache", + "extensions": ["std"] + }, + "application/vnd.sun.xml.impress": { + "source": "apache", + "extensions": ["sxi"] + }, + "application/vnd.sun.xml.impress.template": { + "source": "apache", + "extensions": ["sti"] + }, + "application/vnd.sun.xml.math": { + "source": 
"apache", + "extensions": ["sxm"] + }, + "application/vnd.sun.xml.writer": { + "source": "apache", + "extensions": ["sxw"] + }, + "application/vnd.sun.xml.writer.global": { + "source": "apache", + "extensions": ["sxg"] + }, + "application/vnd.sun.xml.writer.template": { + "source": "apache", + "extensions": ["stw"] + }, + "application/vnd.sus-calendar": { + "source": "iana", + "extensions": ["sus","susp"] + }, + "application/vnd.svd": { + "source": "iana", + "extensions": ["svd"] + }, + "application/vnd.swiftview-ics": { + "source": "iana" + }, + "application/vnd.symbian.install": { + "source": "apache", + "extensions": ["sis","sisx"] + }, + "application/vnd.syncml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xsm"] + }, + "application/vnd.syncml.dm+wbxml": { + "source": "iana", + "extensions": ["bdm"] + }, + "application/vnd.syncml.dm+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdm"] + }, + "application/vnd.syncml.dm.notification": { + "source": "iana" + }, + "application/vnd.syncml.dmddf+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmddf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.syncml.dmtnds+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmtnds+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.syncml.ds.notification": { + "source": "iana" + }, + "application/vnd.tableschema+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.tao.intent-module-archive": { + "source": "iana", + "extensions": ["tao"] + }, + "application/vnd.tcpdump.pcap": { + "source": "iana", + "extensions": ["pcap","cap","dmp"] + }, + "application/vnd.think-cell.ppttc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.tmd.mediaflex.api+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.tml": { + "source": "iana" + }, + "application/vnd.tmobile-livetv": { + "source": "iana", + "extensions": ["tmo"] + }, + "application/vnd.tri.onesource": { + "source": "iana" + }, + "application/vnd.trid.tpt": { + "source": "iana", + "extensions": ["tpt"] + }, + "application/vnd.triscape.mxs": { + "source": "iana", + "extensions": ["mxs"] + }, + "application/vnd.trueapp": { + "source": "iana", + "extensions": ["tra"] + }, + "application/vnd.truedoc": { + "source": "iana" + }, + "application/vnd.ubisoft.webplayer": { + "source": "iana" + }, + "application/vnd.ufdl": { + "source": "iana", + "extensions": ["ufd","ufdl"] + }, + "application/vnd.uiq.theme": { + "source": "iana", + "extensions": ["utz"] + }, + "application/vnd.umajin": { + "source": "iana", + "extensions": ["umj"] + }, + "application/vnd.unity": { + "source": "iana", + "extensions": ["unityweb"] + }, + "application/vnd.uoml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["uoml"] + }, + "application/vnd.uplanet.alert": { + "source": "iana" + }, + "application/vnd.uplanet.alert-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.bearer-choice": { + "source": "iana" + }, + "application/vnd.uplanet.bearer-choice-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.channel": { + "source": "iana" + }, + "application/vnd.uplanet.channel-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.list": { + "source": "iana" + }, + "application/vnd.uplanet.list-wbxml": { + "source": "iana" + }, + 
"application/vnd.uplanet.listcmd": { + "source": "iana" + }, + "application/vnd.uplanet.listcmd-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.signal": { + "source": "iana" + }, + "application/vnd.uri-map": { + "source": "iana" + }, + "application/vnd.valve.source.material": { + "source": "iana" + }, + "application/vnd.vcx": { + "source": "iana", + "extensions": ["vcx"] + }, + "application/vnd.vd-study": { + "source": "iana" + }, + "application/vnd.vectorworks": { + "source": "iana" + }, + "application/vnd.vel+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.verimatrix.vcas": { + "source": "iana" + }, + "application/vnd.veryant.thin": { + "source": "iana" + }, + "application/vnd.vidsoft.vidconference": { + "source": "iana" + }, + "application/vnd.visio": { + "source": "iana", + "extensions": ["vsd","vst","vss","vsw"] + }, + "application/vnd.visionary": { + "source": "iana", + "extensions": ["vis"] + }, + "application/vnd.vividence.scriptfile": { + "source": "iana" + }, + "application/vnd.vsf": { + "source": "iana", + "extensions": ["vsf"] + }, + "application/vnd.wap.sic": { + "source": "iana" + }, + "application/vnd.wap.slc": { + "source": "iana" + }, + "application/vnd.wap.wbxml": { + "source": "iana", + "extensions": ["wbxml"] + }, + "application/vnd.wap.wmlc": { + "source": "iana", + "extensions": ["wmlc"] + }, + "application/vnd.wap.wmlscriptc": { + "source": "iana", + "extensions": ["wmlsc"] + }, + "application/vnd.webturbo": { + "source": "iana", + "extensions": ["wtb"] + }, + "application/vnd.wfa.p2p": { + "source": "iana" + }, + "application/vnd.wfa.wsc": { + "source": "iana" + }, + "application/vnd.windows.devicepairing": { + "source": "iana" + }, + "application/vnd.wmc": { + "source": "iana" + }, + "application/vnd.wmf.bootstrap": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica.package": { + "source": "iana" + }, + "application/vnd.wolfram.player": { + "source": "iana", + "extensions": ["nbp"] + }, + "application/vnd.wordperfect": { + "source": "iana", + "extensions": ["wpd"] + }, + "application/vnd.wqd": { + "source": "iana", + "extensions": ["wqd"] + }, + "application/vnd.wrq-hp3000-labelled": { + "source": "iana" + }, + "application/vnd.wt.stf": { + "source": "iana", + "extensions": ["stf"] + }, + "application/vnd.wv.csp+wbxml": { + "source": "iana" + }, + "application/vnd.wv.csp+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.wv.ssp+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.xacml+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.xara": { + "source": "iana", + "extensions": ["xar"] + }, + "application/vnd.xfdl": { + "source": "iana", + "extensions": ["xfdl"] + }, + "application/vnd.xfdl.webform": { + "source": "iana" + }, + "application/vnd.xmi+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.xmpie.cpkg": { + "source": "iana" + }, + "application/vnd.xmpie.dpkg": { + "source": "iana" + }, + "application/vnd.xmpie.plan": { + "source": "iana" + }, + "application/vnd.xmpie.ppkg": { + "source": "iana" + }, + "application/vnd.xmpie.xlim": { + "source": "iana" + }, + "application/vnd.yamaha.hv-dic": { + "source": "iana", + "extensions": ["hvd"] + }, + "application/vnd.yamaha.hv-script": { + "source": "iana", + "extensions": ["hvs"] + }, + "application/vnd.yamaha.hv-voice": { + "source": "iana", + "extensions": ["hvp"] + }, + 
"application/vnd.yamaha.openscoreformat": { + "source": "iana", + "extensions": ["osf"] + }, + "application/vnd.yamaha.openscoreformat.osfpvg+xml": { + "source": "iana", + "compressible": true, + "extensions": ["osfpvg"] + }, + "application/vnd.yamaha.remote-setup": { + "source": "iana" + }, + "application/vnd.yamaha.smaf-audio": { + "source": "iana", + "extensions": ["saf"] + }, + "application/vnd.yamaha.smaf-phrase": { + "source": "iana", + "extensions": ["spf"] + }, + "application/vnd.yamaha.through-ngn": { + "source": "iana" + }, + "application/vnd.yamaha.tunnel-udpencap": { + "source": "iana" + }, + "application/vnd.yaoweme": { + "source": "iana" + }, + "application/vnd.yellowriver-custom-menu": { + "source": "iana", + "extensions": ["cmp"] + }, + "application/vnd.youtube.yt": { + "source": "iana" + }, + "application/vnd.zul": { + "source": "iana", + "extensions": ["zir","zirz"] + }, + "application/vnd.zzazz.deck+xml": { + "source": "iana", + "compressible": true, + "extensions": ["zaz"] + }, + "application/voicexml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["vxml"] + }, + "application/voucher-cms+json": { + "source": "iana", + "compressible": true + }, + "application/vq-rtcpxr": { + "source": "iana" + }, + "application/wasm": { + "compressible": true, + "extensions": ["wasm"] + }, + "application/watcherinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/webpush-options+json": { + "source": "iana", + "compressible": true + }, + "application/whoispp-query": { + "source": "iana" + }, + "application/whoispp-response": { + "source": "iana" + }, + "application/widget": { + "source": "iana", + "extensions": ["wgt"] + }, + "application/winhlp": { + "source": "apache", + "extensions": ["hlp"] + }, + "application/wita": { + "source": "iana" + }, + "application/wordperfect5.1": { + "source": "iana" + }, + "application/wsdl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wsdl"] + }, + "application/wspolicy+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wspolicy"] + }, + "application/x-7z-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["7z"] + }, + "application/x-abiword": { + "source": "apache", + "extensions": ["abw"] + }, + "application/x-ace-compressed": { + "source": "apache", + "extensions": ["ace"] + }, + "application/x-amf": { + "source": "apache" + }, + "application/x-apple-diskimage": { + "source": "apache", + "extensions": ["dmg"] + }, + "application/x-arj": { + "compressible": false, + "extensions": ["arj"] + }, + "application/x-authorware-bin": { + "source": "apache", + "extensions": ["aab","x32","u32","vox"] + }, + "application/x-authorware-map": { + "source": "apache", + "extensions": ["aam"] + }, + "application/x-authorware-seg": { + "source": "apache", + "extensions": ["aas"] + }, + "application/x-bcpio": { + "source": "apache", + "extensions": ["bcpio"] + }, + "application/x-bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/x-bittorrent": { + "source": "apache", + "extensions": ["torrent"] + }, + "application/x-blorb": { + "source": "apache", + "extensions": ["blb","blorb"] + }, + "application/x-bzip": { + "source": "apache", + "compressible": false, + "extensions": ["bz"] + }, + "application/x-bzip2": { + "source": "apache", + "compressible": false, + "extensions": ["bz2","boz"] + }, + "application/x-cbr": { + "source": "apache", + "extensions": ["cbr","cba","cbt","cbz","cb7"] + }, + "application/x-cdlink": { + "source": 
"apache", + "extensions": ["vcd"] + }, + "application/x-cfs-compressed": { + "source": "apache", + "extensions": ["cfs"] + }, + "application/x-chat": { + "source": "apache", + "extensions": ["chat"] + }, + "application/x-chess-pgn": { + "source": "apache", + "extensions": ["pgn"] + }, + "application/x-chrome-extension": { + "extensions": ["crx"] + }, + "application/x-cocoa": { + "source": "nginx", + "extensions": ["cco"] + }, + "application/x-compress": { + "source": "apache" + }, + "application/x-conference": { + "source": "apache", + "extensions": ["nsc"] + }, + "application/x-cpio": { + "source": "apache", + "extensions": ["cpio"] + }, + "application/x-csh": { + "source": "apache", + "extensions": ["csh"] + }, + "application/x-deb": { + "compressible": false + }, + "application/x-debian-package": { + "source": "apache", + "extensions": ["deb","udeb"] + }, + "application/x-dgc-compressed": { + "source": "apache", + "extensions": ["dgc"] + }, + "application/x-director": { + "source": "apache", + "extensions": ["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"] + }, + "application/x-doom": { + "source": "apache", + "extensions": ["wad"] + }, + "application/x-dtbncx+xml": { + "source": "apache", + "compressible": true, + "extensions": ["ncx"] + }, + "application/x-dtbook+xml": { + "source": "apache", + "compressible": true, + "extensions": ["dtb"] + }, + "application/x-dtbresource+xml": { + "source": "apache", + "compressible": true, + "extensions": ["res"] + }, + "application/x-dvi": { + "source": "apache", + "compressible": false, + "extensions": ["dvi"] + }, + "application/x-envoy": { + "source": "apache", + "extensions": ["evy"] + }, + "application/x-eva": { + "source": "apache", + "extensions": ["eva"] + }, + "application/x-font-bdf": { + "source": "apache", + "extensions": ["bdf"] + }, + "application/x-font-dos": { + "source": "apache" + }, + "application/x-font-framemaker": { + "source": "apache" + }, + "application/x-font-ghostscript": { + "source": "apache", + "extensions": ["gsf"] + }, + "application/x-font-libgrx": { + "source": "apache" + }, + "application/x-font-linux-psf": { + "source": "apache", + "extensions": ["psf"] + }, + "application/x-font-pcf": { + "source": "apache", + "extensions": ["pcf"] + }, + "application/x-font-snf": { + "source": "apache", + "extensions": ["snf"] + }, + "application/x-font-speedo": { + "source": "apache" + }, + "application/x-font-sunos-news": { + "source": "apache" + }, + "application/x-font-type1": { + "source": "apache", + "extensions": ["pfa","pfb","pfm","afm"] + }, + "application/x-font-vfont": { + "source": "apache" + }, + "application/x-freearc": { + "source": "apache", + "extensions": ["arc"] + }, + "application/x-futuresplash": { + "source": "apache", + "extensions": ["spl"] + }, + "application/x-gca-compressed": { + "source": "apache", + "extensions": ["gca"] + }, + "application/x-glulx": { + "source": "apache", + "extensions": ["ulx"] + }, + "application/x-gnumeric": { + "source": "apache", + "extensions": ["gnumeric"] + }, + "application/x-gramps-xml": { + "source": "apache", + "extensions": ["gramps"] + }, + "application/x-gtar": { + "source": "apache", + "extensions": ["gtar"] + }, + "application/x-gzip": { + "source": "apache" + }, + "application/x-hdf": { + "source": "apache", + "extensions": ["hdf"] + }, + "application/x-httpd-php": { + "compressible": true, + "extensions": ["php"] + }, + "application/x-install-instructions": { + "source": "apache", + "extensions": ["install"] + }, + "application/x-iso9660-image": { + 
"source": "apache", + "extensions": ["iso"] + }, + "application/x-java-archive-diff": { + "source": "nginx", + "extensions": ["jardiff"] + }, + "application/x-java-jnlp-file": { + "source": "apache", + "compressible": false, + "extensions": ["jnlp"] + }, + "application/x-javascript": { + "compressible": true + }, + "application/x-latex": { + "source": "apache", + "compressible": false, + "extensions": ["latex"] + }, + "application/x-lua-bytecode": { + "extensions": ["luac"] + }, + "application/x-lzh-compressed": { + "source": "apache", + "extensions": ["lzh","lha"] + }, + "application/x-makeself": { + "source": "nginx", + "extensions": ["run"] + }, + "application/x-mie": { + "source": "apache", + "extensions": ["mie"] + }, + "application/x-mobipocket-ebook": { + "source": "apache", + "extensions": ["prc","mobi"] + }, + "application/x-mpegurl": { + "compressible": false + }, + "application/x-ms-application": { + "source": "apache", + "extensions": ["application"] + }, + "application/x-ms-shortcut": { + "source": "apache", + "extensions": ["lnk"] + }, + "application/x-ms-wmd": { + "source": "apache", + "extensions": ["wmd"] + }, + "application/x-ms-wmz": { + "source": "apache", + "extensions": ["wmz"] + }, + "application/x-ms-xbap": { + "source": "apache", + "extensions": ["xbap"] + }, + "application/x-msaccess": { + "source": "apache", + "extensions": ["mdb"] + }, + "application/x-msbinder": { + "source": "apache", + "extensions": ["obd"] + }, + "application/x-mscardfile": { + "source": "apache", + "extensions": ["crd"] + }, + "application/x-msclip": { + "source": "apache", + "extensions": ["clp"] + }, + "application/x-msdos-program": { + "extensions": ["exe"] + }, + "application/x-msdownload": { + "source": "apache", + "extensions": ["exe","dll","com","bat","msi"] + }, + "application/x-msmediaview": { + "source": "apache", + "extensions": ["mvb","m13","m14"] + }, + "application/x-msmetafile": { + "source": "apache", + "extensions": ["wmf","wmz","emf","emz"] + }, + "application/x-msmoney": { + "source": "apache", + "extensions": ["mny"] + }, + "application/x-mspublisher": { + "source": "apache", + "extensions": ["pub"] + }, + "application/x-msschedule": { + "source": "apache", + "extensions": ["scd"] + }, + "application/x-msterminal": { + "source": "apache", + "extensions": ["trm"] + }, + "application/x-mswrite": { + "source": "apache", + "extensions": ["wri"] + }, + "application/x-netcdf": { + "source": "apache", + "extensions": ["nc","cdf"] + }, + "application/x-ns-proxy-autoconfig": { + "compressible": true, + "extensions": ["pac"] + }, + "application/x-nzb": { + "source": "apache", + "extensions": ["nzb"] + }, + "application/x-perl": { + "source": "nginx", + "extensions": ["pl","pm"] + }, + "application/x-pilot": { + "source": "nginx", + "extensions": ["prc","pdb"] + }, + "application/x-pkcs12": { + "source": "apache", + "compressible": false, + "extensions": ["p12","pfx"] + }, + "application/x-pkcs7-certificates": { + "source": "apache", + "extensions": ["p7b","spc"] + }, + "application/x-pkcs7-certreqresp": { + "source": "apache", + "extensions": ["p7r"] + }, + "application/x-rar-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["rar"] + }, + "application/x-redhat-package-manager": { + "source": "nginx", + "extensions": ["rpm"] + }, + "application/x-research-info-systems": { + "source": "apache", + "extensions": ["ris"] + }, + "application/x-sea": { + "source": "nginx", + "extensions": ["sea"] + }, + "application/x-sh": { + "source": "apache", + 
"compressible": true, + "extensions": ["sh"] + }, + "application/x-shar": { + "source": "apache", + "extensions": ["shar"] + }, + "application/x-shockwave-flash": { + "source": "apache", + "compressible": false, + "extensions": ["swf"] + }, + "application/x-silverlight-app": { + "source": "apache", + "extensions": ["xap"] + }, + "application/x-sql": { + "source": "apache", + "extensions": ["sql"] + }, + "application/x-stuffit": { + "source": "apache", + "compressible": false, + "extensions": ["sit"] + }, + "application/x-stuffitx": { + "source": "apache", + "extensions": ["sitx"] + }, + "application/x-subrip": { + "source": "apache", + "extensions": ["srt"] + }, + "application/x-sv4cpio": { + "source": "apache", + "extensions": ["sv4cpio"] + }, + "application/x-sv4crc": { + "source": "apache", + "extensions": ["sv4crc"] + }, + "application/x-t3vm-image": { + "source": "apache", + "extensions": ["t3"] + }, + "application/x-tads": { + "source": "apache", + "extensions": ["gam"] + }, + "application/x-tar": { + "source": "apache", + "compressible": true, + "extensions": ["tar"] + }, + "application/x-tcl": { + "source": "apache", + "extensions": ["tcl","tk"] + }, + "application/x-tex": { + "source": "apache", + "extensions": ["tex"] + }, + "application/x-tex-tfm": { + "source": "apache", + "extensions": ["tfm"] + }, + "application/x-texinfo": { + "source": "apache", + "extensions": ["texinfo","texi"] + }, + "application/x-tgif": { + "source": "apache", + "extensions": ["obj"] + }, + "application/x-ustar": { + "source": "apache", + "extensions": ["ustar"] + }, + "application/x-virtualbox-hdd": { + "compressible": true, + "extensions": ["hdd"] + }, + "application/x-virtualbox-ova": { + "compressible": true, + "extensions": ["ova"] + }, + "application/x-virtualbox-ovf": { + "compressible": true, + "extensions": ["ovf"] + }, + "application/x-virtualbox-vbox": { + "compressible": true, + "extensions": ["vbox"] + }, + "application/x-virtualbox-vbox-extpack": { + "compressible": false, + "extensions": ["vbox-extpack"] + }, + "application/x-virtualbox-vdi": { + "compressible": true, + "extensions": ["vdi"] + }, + "application/x-virtualbox-vhd": { + "compressible": true, + "extensions": ["vhd"] + }, + "application/x-virtualbox-vmdk": { + "compressible": true, + "extensions": ["vmdk"] + }, + "application/x-wais-source": { + "source": "apache", + "extensions": ["src"] + }, + "application/x-web-app-manifest+json": { + "compressible": true, + "extensions": ["webapp"] + }, + "application/x-www-form-urlencoded": { + "source": "iana", + "compressible": true + }, + "application/x-x509-ca-cert": { + "source": "apache", + "extensions": ["der","crt","pem"] + }, + "application/x-xfig": { + "source": "apache", + "extensions": ["fig"] + }, + "application/x-xliff+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xlf"] + }, + "application/x-xpinstall": { + "source": "apache", + "compressible": false, + "extensions": ["xpi"] + }, + "application/x-xz": { + "source": "apache", + "extensions": ["xz"] + }, + "application/x-zmachine": { + "source": "apache", + "extensions": ["z1","z2","z3","z4","z5","z6","z7","z8"] + }, + "application/x400-bp": { + "source": "iana" + }, + "application/xacml+xml": { + "source": "iana", + "compressible": true + }, + "application/xaml+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xaml"] + }, + "application/xcap-att+xml": { + "source": "iana", + "compressible": true + }, + "application/xcap-caps+xml": { + "source": "iana", + "compressible": true + 
}, + "application/xcap-diff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdf"] + }, + "application/xcap-el+xml": { + "source": "iana", + "compressible": true + }, + "application/xcap-error+xml": { + "source": "iana", + "compressible": true + }, + "application/xcap-ns+xml": { + "source": "iana", + "compressible": true + }, + "application/xcon-conference-info+xml": { + "source": "iana", + "compressible": true + }, + "application/xcon-conference-info-diff+xml": { + "source": "iana", + "compressible": true + }, + "application/xenc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xenc"] + }, + "application/xhtml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xhtml","xht"] + }, + "application/xhtml-voice+xml": { + "source": "apache", + "compressible": true + }, + "application/xliff+xml": { + "source": "iana", + "compressible": true + }, + "application/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml","xsl","xsd","rng"] + }, + "application/xml-dtd": { + "source": "iana", + "compressible": true, + "extensions": ["dtd"] + }, + "application/xml-external-parsed-entity": { + "source": "iana" + }, + "application/xml-patch+xml": { + "source": "iana", + "compressible": true + }, + "application/xmpp+xml": { + "source": "iana", + "compressible": true + }, + "application/xop+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xop"] + }, + "application/xproc+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xpl"] + }, + "application/xslt+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xslt"] + }, + "application/xspf+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xspf"] + }, + "application/xv+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mxml","xhvml","xvml","xvm"] + }, + "application/yang": { + "source": "iana", + "extensions": ["yang"] + }, + "application/yang-data+json": { + "source": "iana", + "compressible": true + }, + "application/yang-data+xml": { + "source": "iana", + "compressible": true + }, + "application/yang-patch+json": { + "source": "iana", + "compressible": true + }, + "application/yang-patch+xml": { + "source": "iana", + "compressible": true + }, + "application/yin+xml": { + "source": "iana", + "compressible": true, + "extensions": ["yin"] + }, + "application/zip": { + "source": "iana", + "compressible": false, + "extensions": ["zip"] + }, + "application/zlib": { + "source": "iana" + }, + "application/zstd": { + "source": "iana" + }, + "audio/1d-interleaved-parityfec": { + "source": "iana" + }, + "audio/32kadpcm": { + "source": "iana" + }, + "audio/3gpp": { + "source": "iana", + "compressible": false, + "extensions": ["3gpp"] + }, + "audio/3gpp2": { + "source": "iana" + }, + "audio/aac": { + "source": "iana" + }, + "audio/ac3": { + "source": "iana" + }, + "audio/adpcm": { + "source": "apache", + "extensions": ["adp"] + }, + "audio/amr": { + "source": "iana" + }, + "audio/amr-wb": { + "source": "iana" + }, + "audio/amr-wb+": { + "source": "iana" + }, + "audio/aptx": { + "source": "iana" + }, + "audio/asc": { + "source": "iana" + }, + "audio/atrac-advanced-lossless": { + "source": "iana" + }, + "audio/atrac-x": { + "source": "iana" + }, + "audio/atrac3": { + "source": "iana" + }, + "audio/basic": { + "source": "iana", + "compressible": false, + "extensions": ["au","snd"] + }, + "audio/bv16": { + "source": "iana" + }, + "audio/bv32": { + "source": "iana" + }, + "audio/clearmode": { + "source": 
"iana" + }, + "audio/cn": { + "source": "iana" + }, + "audio/dat12": { + "source": "iana" + }, + "audio/dls": { + "source": "iana" + }, + "audio/dsr-es201108": { + "source": "iana" + }, + "audio/dsr-es202050": { + "source": "iana" + }, + "audio/dsr-es202211": { + "source": "iana" + }, + "audio/dsr-es202212": { + "source": "iana" + }, + "audio/dv": { + "source": "iana" + }, + "audio/dvi4": { + "source": "iana" + }, + "audio/eac3": { + "source": "iana" + }, + "audio/encaprtp": { + "source": "iana" + }, + "audio/evrc": { + "source": "iana" + }, + "audio/evrc-qcp": { + "source": "iana" + }, + "audio/evrc0": { + "source": "iana" + }, + "audio/evrc1": { + "source": "iana" + }, + "audio/evrcb": { + "source": "iana" + }, + "audio/evrcb0": { + "source": "iana" + }, + "audio/evrcb1": { + "source": "iana" + }, + "audio/evrcnw": { + "source": "iana" + }, + "audio/evrcnw0": { + "source": "iana" + }, + "audio/evrcnw1": { + "source": "iana" + }, + "audio/evrcwb": { + "source": "iana" + }, + "audio/evrcwb0": { + "source": "iana" + }, + "audio/evrcwb1": { + "source": "iana" + }, + "audio/evs": { + "source": "iana" + }, + "audio/fwdred": { + "source": "iana" + }, + "audio/g711-0": { + "source": "iana" + }, + "audio/g719": { + "source": "iana" + }, + "audio/g722": { + "source": "iana" + }, + "audio/g7221": { + "source": "iana" + }, + "audio/g723": { + "source": "iana" + }, + "audio/g726-16": { + "source": "iana" + }, + "audio/g726-24": { + "source": "iana" + }, + "audio/g726-32": { + "source": "iana" + }, + "audio/g726-40": { + "source": "iana" + }, + "audio/g728": { + "source": "iana" + }, + "audio/g729": { + "source": "iana" + }, + "audio/g7291": { + "source": "iana" + }, + "audio/g729d": { + "source": "iana" + }, + "audio/g729e": { + "source": "iana" + }, + "audio/gsm": { + "source": "iana" + }, + "audio/gsm-efr": { + "source": "iana" + }, + "audio/gsm-hr-08": { + "source": "iana" + }, + "audio/ilbc": { + "source": "iana" + }, + "audio/ip-mr_v2.5": { + "source": "iana" + }, + "audio/isac": { + "source": "apache" + }, + "audio/l16": { + "source": "iana" + }, + "audio/l20": { + "source": "iana" + }, + "audio/l24": { + "source": "iana", + "compressible": false + }, + "audio/l8": { + "source": "iana" + }, + "audio/lpc": { + "source": "iana" + }, + "audio/melp": { + "source": "iana" + }, + "audio/melp1200": { + "source": "iana" + }, + "audio/melp2400": { + "source": "iana" + }, + "audio/melp600": { + "source": "iana" + }, + "audio/midi": { + "source": "apache", + "extensions": ["mid","midi","kar","rmi"] + }, + "audio/mobile-xmf": { + "source": "iana" + }, + "audio/mp3": { + "compressible": false, + "extensions": ["mp3"] + }, + "audio/mp4": { + "source": "iana", + "compressible": false, + "extensions": ["m4a","mp4a"] + }, + "audio/mp4a-latm": { + "source": "iana" + }, + "audio/mpa": { + "source": "iana" + }, + "audio/mpa-robust": { + "source": "iana" + }, + "audio/mpeg": { + "source": "iana", + "compressible": false, + "extensions": ["mpga","mp2","mp2a","mp3","m2a","m3a"] + }, + "audio/mpeg4-generic": { + "source": "iana" + }, + "audio/musepack": { + "source": "apache" + }, + "audio/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["oga","ogg","spx"] + }, + "audio/opus": { + "source": "iana" + }, + "audio/parityfec": { + "source": "iana" + }, + "audio/pcma": { + "source": "iana" + }, + "audio/pcma-wb": { + "source": "iana" + }, + "audio/pcmu": { + "source": "iana" + }, + "audio/pcmu-wb": { + "source": "iana" + }, + "audio/prs.sid": { + "source": "iana" + }, + "audio/qcelp": { + "source": 
"iana" + }, + "audio/raptorfec": { + "source": "iana" + }, + "audio/red": { + "source": "iana" + }, + "audio/rtp-enc-aescm128": { + "source": "iana" + }, + "audio/rtp-midi": { + "source": "iana" + }, + "audio/rtploopback": { + "source": "iana" + }, + "audio/rtx": { + "source": "iana" + }, + "audio/s3m": { + "source": "apache", + "extensions": ["s3m"] + }, + "audio/silk": { + "source": "apache", + "extensions": ["sil"] + }, + "audio/smv": { + "source": "iana" + }, + "audio/smv-qcp": { + "source": "iana" + }, + "audio/smv0": { + "source": "iana" + }, + "audio/sp-midi": { + "source": "iana" + }, + "audio/speex": { + "source": "iana" + }, + "audio/t140c": { + "source": "iana" + }, + "audio/t38": { + "source": "iana" + }, + "audio/telephone-event": { + "source": "iana" + }, + "audio/tetra_acelp": { + "source": "iana" + }, + "audio/tone": { + "source": "iana" + }, + "audio/uemclip": { + "source": "iana" + }, + "audio/ulpfec": { + "source": "iana" + }, + "audio/usac": { + "source": "iana" + }, + "audio/vdvi": { + "source": "iana" + }, + "audio/vmr-wb": { + "source": "iana" + }, + "audio/vnd.3gpp.iufp": { + "source": "iana" + }, + "audio/vnd.4sb": { + "source": "iana" + }, + "audio/vnd.audiokoz": { + "source": "iana" + }, + "audio/vnd.celp": { + "source": "iana" + }, + "audio/vnd.cisco.nse": { + "source": "iana" + }, + "audio/vnd.cmles.radio-events": { + "source": "iana" + }, + "audio/vnd.cns.anp1": { + "source": "iana" + }, + "audio/vnd.cns.inf1": { + "source": "iana" + }, + "audio/vnd.dece.audio": { + "source": "iana", + "extensions": ["uva","uvva"] + }, + "audio/vnd.digital-winds": { + "source": "iana", + "extensions": ["eol"] + }, + "audio/vnd.dlna.adts": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.1": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.2": { + "source": "iana" + }, + "audio/vnd.dolby.mlp": { + "source": "iana" + }, + "audio/vnd.dolby.mps": { + "source": "iana" + }, + "audio/vnd.dolby.pl2": { + "source": "iana" + }, + "audio/vnd.dolby.pl2x": { + "source": "iana" + }, + "audio/vnd.dolby.pl2z": { + "source": "iana" + }, + "audio/vnd.dolby.pulse.1": { + "source": "iana" + }, + "audio/vnd.dra": { + "source": "iana", + "extensions": ["dra"] + }, + "audio/vnd.dts": { + "source": "iana", + "extensions": ["dts"] + }, + "audio/vnd.dts.hd": { + "source": "iana", + "extensions": ["dtshd"] + }, + "audio/vnd.dts.uhd": { + "source": "iana" + }, + "audio/vnd.dvb.file": { + "source": "iana" + }, + "audio/vnd.everad.plj": { + "source": "iana" + }, + "audio/vnd.hns.audio": { + "source": "iana" + }, + "audio/vnd.lucent.voice": { + "source": "iana", + "extensions": ["lvp"] + }, + "audio/vnd.ms-playready.media.pya": { + "source": "iana", + "extensions": ["pya"] + }, + "audio/vnd.nokia.mobile-xmf": { + "source": "iana" + }, + "audio/vnd.nortel.vbk": { + "source": "iana" + }, + "audio/vnd.nuera.ecelp4800": { + "source": "iana", + "extensions": ["ecelp4800"] + }, + "audio/vnd.nuera.ecelp7470": { + "source": "iana", + "extensions": ["ecelp7470"] + }, + "audio/vnd.nuera.ecelp9600": { + "source": "iana", + "extensions": ["ecelp9600"] + }, + "audio/vnd.octel.sbc": { + "source": "iana" + }, + "audio/vnd.presonus.multitrack": { + "source": "iana" + }, + "audio/vnd.qcelp": { + "source": "iana" + }, + "audio/vnd.rhetorex.32kadpcm": { + "source": "iana" + }, + "audio/vnd.rip": { + "source": "iana", + "extensions": ["rip"] + }, + "audio/vnd.rn-realaudio": { + "compressible": false + }, + "audio/vnd.sealedmedia.softseal.mpeg": { + "source": "iana" + }, + "audio/vnd.vmx.cvsd": { + "source": "iana" + }, + 
"audio/vnd.wave": { + "compressible": false + }, + "audio/vorbis": { + "source": "iana", + "compressible": false + }, + "audio/vorbis-config": { + "source": "iana" + }, + "audio/wav": { + "compressible": false, + "extensions": ["wav"] + }, + "audio/wave": { + "compressible": false, + "extensions": ["wav"] + }, + "audio/webm": { + "source": "apache", + "compressible": false, + "extensions": ["weba"] + }, + "audio/x-aac": { + "source": "apache", + "compressible": false, + "extensions": ["aac"] + }, + "audio/x-aiff": { + "source": "apache", + "extensions": ["aif","aiff","aifc"] + }, + "audio/x-caf": { + "source": "apache", + "compressible": false, + "extensions": ["caf"] + }, + "audio/x-flac": { + "source": "apache", + "extensions": ["flac"] + }, + "audio/x-m4a": { + "source": "nginx", + "extensions": ["m4a"] + }, + "audio/x-matroska": { + "source": "apache", + "extensions": ["mka"] + }, + "audio/x-mpegurl": { + "source": "apache", + "extensions": ["m3u"] + }, + "audio/x-ms-wax": { + "source": "apache", + "extensions": ["wax"] + }, + "audio/x-ms-wma": { + "source": "apache", + "extensions": ["wma"] + }, + "audio/x-pn-realaudio": { + "source": "apache", + "extensions": ["ram","ra"] + }, + "audio/x-pn-realaudio-plugin": { + "source": "apache", + "extensions": ["rmp"] + }, + "audio/x-realaudio": { + "source": "nginx", + "extensions": ["ra"] + }, + "audio/x-tta": { + "source": "apache" + }, + "audio/x-wav": { + "source": "apache", + "extensions": ["wav"] + }, + "audio/xm": { + "source": "apache", + "extensions": ["xm"] + }, + "chemical/x-cdx": { + "source": "apache", + "extensions": ["cdx"] + }, + "chemical/x-cif": { + "source": "apache", + "extensions": ["cif"] + }, + "chemical/x-cmdf": { + "source": "apache", + "extensions": ["cmdf"] + }, + "chemical/x-cml": { + "source": "apache", + "extensions": ["cml"] + }, + "chemical/x-csml": { + "source": "apache", + "extensions": ["csml"] + }, + "chemical/x-pdb": { + "source": "apache" + }, + "chemical/x-xyz": { + "source": "apache", + "extensions": ["xyz"] + }, + "font/collection": { + "source": "iana", + "extensions": ["ttc"] + }, + "font/otf": { + "source": "iana", + "compressible": true, + "extensions": ["otf"] + }, + "font/sfnt": { + "source": "iana" + }, + "font/ttf": { + "source": "iana", + "extensions": ["ttf"] + }, + "font/woff": { + "source": "iana", + "extensions": ["woff"] + }, + "font/woff2": { + "source": "iana", + "extensions": ["woff2"] + }, + "image/aces": { + "source": "iana", + "extensions": ["exr"] + }, + "image/apng": { + "compressible": false, + "extensions": ["apng"] + }, + "image/avci": { + "source": "iana" + }, + "image/avcs": { + "source": "iana" + }, + "image/bmp": { + "source": "iana", + "compressible": true, + "extensions": ["bmp"] + }, + "image/cgm": { + "source": "iana", + "extensions": ["cgm"] + }, + "image/dicom-rle": { + "source": "iana", + "extensions": ["drle"] + }, + "image/emf": { + "source": "iana", + "extensions": ["emf"] + }, + "image/fits": { + "source": "iana", + "extensions": ["fits"] + }, + "image/g3fax": { + "source": "iana", + "extensions": ["g3"] + }, + "image/gif": { + "source": "iana", + "compressible": false, + "extensions": ["gif"] + }, + "image/heic": { + "source": "iana", + "extensions": ["heic"] + }, + "image/heic-sequence": { + "source": "iana", + "extensions": ["heics"] + }, + "image/heif": { + "source": "iana", + "extensions": ["heif"] + }, + "image/heif-sequence": { + "source": "iana", + "extensions": ["heifs"] + }, + "image/ief": { + "source": "iana", + "extensions": ["ief"] + }, + "image/jls": 
{ + "source": "iana", + "extensions": ["jls"] + }, + "image/jp2": { + "source": "iana", + "compressible": false, + "extensions": ["jp2","jpg2"] + }, + "image/jpeg": { + "source": "iana", + "compressible": false, + "extensions": ["jpeg","jpg","jpe"] + }, + "image/jpm": { + "source": "iana", + "compressible": false, + "extensions": ["jpm"] + }, + "image/jpx": { + "source": "iana", + "compressible": false, + "extensions": ["jpx","jpf"] + }, + "image/jxr": { + "source": "iana", + "extensions": ["jxr"] + }, + "image/ktx": { + "source": "iana", + "extensions": ["ktx"] + }, + "image/naplps": { + "source": "iana" + }, + "image/pjpeg": { + "compressible": false + }, + "image/png": { + "source": "iana", + "compressible": false, + "extensions": ["png"] + }, + "image/prs.btif": { + "source": "iana", + "extensions": ["btif"] + }, + "image/prs.pti": { + "source": "iana", + "extensions": ["pti"] + }, + "image/pwg-raster": { + "source": "iana" + }, + "image/sgi": { + "source": "apache", + "extensions": ["sgi"] + }, + "image/svg+xml": { + "source": "iana", + "compressible": true, + "extensions": ["svg","svgz"] + }, + "image/t38": { + "source": "iana", + "extensions": ["t38"] + }, + "image/tiff": { + "source": "iana", + "compressible": false, + "extensions": ["tif","tiff"] + }, + "image/tiff-fx": { + "source": "iana", + "extensions": ["tfx"] + }, + "image/vnd.adobe.photoshop": { + "source": "iana", + "compressible": true, + "extensions": ["psd"] + }, + "image/vnd.airzip.accelerator.azv": { + "source": "iana", + "extensions": ["azv"] + }, + "image/vnd.cns.inf2": { + "source": "iana" + }, + "image/vnd.dece.graphic": { + "source": "iana", + "extensions": ["uvi","uvvi","uvg","uvvg"] + }, + "image/vnd.djvu": { + "source": "iana", + "extensions": ["djvu","djv"] + }, + "image/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "image/vnd.dwg": { + "source": "iana", + "extensions": ["dwg"] + }, + "image/vnd.dxf": { + "source": "iana", + "extensions": ["dxf"] + }, + "image/vnd.fastbidsheet": { + "source": "iana", + "extensions": ["fbs"] + }, + "image/vnd.fpx": { + "source": "iana", + "extensions": ["fpx"] + }, + "image/vnd.fst": { + "source": "iana", + "extensions": ["fst"] + }, + "image/vnd.fujixerox.edmics-mmr": { + "source": "iana", + "extensions": ["mmr"] + }, + "image/vnd.fujixerox.edmics-rlc": { + "source": "iana", + "extensions": ["rlc"] + }, + "image/vnd.globalgraphics.pgb": { + "source": "iana" + }, + "image/vnd.microsoft.icon": { + "source": "iana", + "extensions": ["ico"] + }, + "image/vnd.mix": { + "source": "iana" + }, + "image/vnd.mozilla.apng": { + "source": "iana" + }, + "image/vnd.ms-modi": { + "source": "iana", + "extensions": ["mdi"] + }, + "image/vnd.ms-photo": { + "source": "apache", + "extensions": ["wdp"] + }, + "image/vnd.net-fpx": { + "source": "iana", + "extensions": ["npx"] + }, + "image/vnd.radiance": { + "source": "iana" + }, + "image/vnd.sealed.png": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.gif": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.jpg": { + "source": "iana" + }, + "image/vnd.svf": { + "source": "iana" + }, + "image/vnd.tencent.tap": { + "source": "iana", + "extensions": ["tap"] + }, + "image/vnd.valve.source.texture": { + "source": "iana", + "extensions": ["vtf"] + }, + "image/vnd.wap.wbmp": { + "source": "iana", + "extensions": ["wbmp"] + }, + "image/vnd.xiff": { + "source": "iana", + "extensions": ["xif"] + }, + "image/vnd.zbrush.pcx": { + "source": "iana", + "extensions": ["pcx"] + }, + "image/webp": { + "source": 
"apache", + "extensions": ["webp"] + }, + "image/wmf": { + "source": "iana", + "extensions": ["wmf"] + }, + "image/x-3ds": { + "source": "apache", + "extensions": ["3ds"] + }, + "image/x-cmu-raster": { + "source": "apache", + "extensions": ["ras"] + }, + "image/x-cmx": { + "source": "apache", + "extensions": ["cmx"] + }, + "image/x-freehand": { + "source": "apache", + "extensions": ["fh","fhc","fh4","fh5","fh7"] + }, + "image/x-icon": { + "source": "apache", + "compressible": true, + "extensions": ["ico"] + }, + "image/x-jng": { + "source": "nginx", + "extensions": ["jng"] + }, + "image/x-mrsid-image": { + "source": "apache", + "extensions": ["sid"] + }, + "image/x-ms-bmp": { + "source": "nginx", + "compressible": true, + "extensions": ["bmp"] + }, + "image/x-pcx": { + "source": "apache", + "extensions": ["pcx"] + }, + "image/x-pict": { + "source": "apache", + "extensions": ["pic","pct"] + }, + "image/x-portable-anymap": { + "source": "apache", + "extensions": ["pnm"] + }, + "image/x-portable-bitmap": { + "source": "apache", + "extensions": ["pbm"] + }, + "image/x-portable-graymap": { + "source": "apache", + "extensions": ["pgm"] + }, + "image/x-portable-pixmap": { + "source": "apache", + "extensions": ["ppm"] + }, + "image/x-rgb": { + "source": "apache", + "extensions": ["rgb"] + }, + "image/x-tga": { + "source": "apache", + "extensions": ["tga"] + }, + "image/x-xbitmap": { + "source": "apache", + "extensions": ["xbm"] + }, + "image/x-xcf": { + "compressible": false + }, + "image/x-xpixmap": { + "source": "apache", + "extensions": ["xpm"] + }, + "image/x-xwindowdump": { + "source": "apache", + "extensions": ["xwd"] + }, + "message/cpim": { + "source": "iana" + }, + "message/delivery-status": { + "source": "iana" + }, + "message/disposition-notification": { + "source": "iana", + "extensions": [ + "disposition-notification" + ] + }, + "message/external-body": { + "source": "iana" + }, + "message/feedback-report": { + "source": "iana" + }, + "message/global": { + "source": "iana", + "extensions": ["u8msg"] + }, + "message/global-delivery-status": { + "source": "iana", + "extensions": ["u8dsn"] + }, + "message/global-disposition-notification": { + "source": "iana", + "extensions": ["u8mdn"] + }, + "message/global-headers": { + "source": "iana", + "extensions": ["u8hdr"] + }, + "message/http": { + "source": "iana", + "compressible": false + }, + "message/imdn+xml": { + "source": "iana", + "compressible": true + }, + "message/news": { + "source": "iana" + }, + "message/partial": { + "source": "iana", + "compressible": false + }, + "message/rfc822": { + "source": "iana", + "compressible": true, + "extensions": ["eml","mime"] + }, + "message/s-http": { + "source": "iana" + }, + "message/sip": { + "source": "iana" + }, + "message/sipfrag": { + "source": "iana" + }, + "message/tracking-status": { + "source": "iana" + }, + "message/vnd.si.simp": { + "source": "iana" + }, + "message/vnd.wfa.wsc": { + "source": "iana", + "extensions": ["wsc"] + }, + "model/3mf": { + "source": "iana", + "extensions": ["3mf"] + }, + "model/gltf+json": { + "source": "iana", + "compressible": true, + "extensions": ["gltf"] + }, + "model/gltf-binary": { + "source": "iana", + "compressible": true, + "extensions": ["glb"] + }, + "model/iges": { + "source": "iana", + "compressible": false, + "extensions": ["igs","iges"] + }, + "model/mesh": { + "source": "iana", + "compressible": false, + "extensions": ["msh","mesh","silo"] + }, + "model/stl": { + "source": "iana", + "extensions": ["stl"] + }, + "model/vnd.collada+xml": { + 
"source": "iana", + "compressible": true, + "extensions": ["dae"] + }, + "model/vnd.dwf": { + "source": "iana", + "extensions": ["dwf"] + }, + "model/vnd.flatland.3dml": { + "source": "iana" + }, + "model/vnd.gdl": { + "source": "iana", + "extensions": ["gdl"] + }, + "model/vnd.gs-gdl": { + "source": "apache" + }, + "model/vnd.gs.gdl": { + "source": "iana" + }, + "model/vnd.gtw": { + "source": "iana", + "extensions": ["gtw"] + }, + "model/vnd.moml+xml": { + "source": "iana", + "compressible": true + }, + "model/vnd.mts": { + "source": "iana", + "extensions": ["mts"] + }, + "model/vnd.opengex": { + "source": "iana", + "extensions": ["ogex"] + }, + "model/vnd.parasolid.transmit.binary": { + "source": "iana", + "extensions": ["x_b"] + }, + "model/vnd.parasolid.transmit.text": { + "source": "iana", + "extensions": ["x_t"] + }, + "model/vnd.rosette.annotated-data-model": { + "source": "iana" + }, + "model/vnd.usdz+zip": { + "source": "iana", + "compressible": false, + "extensions": ["usdz"] + }, + "model/vnd.valve.source.compiled-map": { + "source": "iana", + "extensions": ["bsp"] + }, + "model/vnd.vtu": { + "source": "iana", + "extensions": ["vtu"] + }, + "model/vrml": { + "source": "iana", + "compressible": false, + "extensions": ["wrl","vrml"] + }, + "model/x3d+binary": { + "source": "apache", + "compressible": false, + "extensions": ["x3db","x3dbz"] + }, + "model/x3d+fastinfoset": { + "source": "iana", + "extensions": ["x3db"] + }, + "model/x3d+vrml": { + "source": "apache", + "compressible": false, + "extensions": ["x3dv","x3dvz"] + }, + "model/x3d+xml": { + "source": "iana", + "compressible": true, + "extensions": ["x3d","x3dz"] + }, + "model/x3d-vrml": { + "source": "iana", + "extensions": ["x3dv"] + }, + "multipart/alternative": { + "source": "iana", + "compressible": false + }, + "multipart/appledouble": { + "source": "iana" + }, + "multipart/byteranges": { + "source": "iana" + }, + "multipart/digest": { + "source": "iana" + }, + "multipart/encrypted": { + "source": "iana", + "compressible": false + }, + "multipart/form-data": { + "source": "iana", + "compressible": false + }, + "multipart/header-set": { + "source": "iana" + }, + "multipart/mixed": { + "source": "iana", + "compressible": false + }, + "multipart/multilingual": { + "source": "iana" + }, + "multipart/parallel": { + "source": "iana" + }, + "multipart/related": { + "source": "iana", + "compressible": false + }, + "multipart/report": { + "source": "iana" + }, + "multipart/signed": { + "source": "iana", + "compressible": false + }, + "multipart/vnd.bint.med-plus": { + "source": "iana" + }, + "multipart/voice-message": { + "source": "iana" + }, + "multipart/x-mixed-replace": { + "source": "iana" + }, + "text/1d-interleaved-parityfec": { + "source": "iana" + }, + "text/cache-manifest": { + "source": "iana", + "compressible": true, + "extensions": ["appcache","manifest"] + }, + "text/calendar": { + "source": "iana", + "extensions": ["ics","ifb"] + }, + "text/calender": { + "compressible": true + }, + "text/cmd": { + "compressible": true + }, + "text/coffeescript": { + "extensions": ["coffee","litcoffee"] + }, + "text/css": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["css"] + }, + "text/csv": { + "source": "iana", + "compressible": true, + "extensions": ["csv"] + }, + "text/csv-schema": { + "source": "iana" + }, + "text/directory": { + "source": "iana" + }, + "text/dns": { + "source": "iana" + }, + "text/ecmascript": { + "source": "iana" + }, + "text/encaprtp": { + "source": "iana" + }, + 
"text/enriched": { + "source": "iana" + }, + "text/fwdred": { + "source": "iana" + }, + "text/grammar-ref-list": { + "source": "iana" + }, + "text/html": { + "source": "iana", + "compressible": true, + "extensions": ["html","htm","shtml"] + }, + "text/jade": { + "extensions": ["jade"] + }, + "text/javascript": { + "source": "iana", + "compressible": true + }, + "text/jcr-cnd": { + "source": "iana" + }, + "text/jsx": { + "compressible": true, + "extensions": ["jsx"] + }, + "text/less": { + "compressible": true, + "extensions": ["less"] + }, + "text/markdown": { + "source": "iana", + "compressible": true, + "extensions": ["markdown","md"] + }, + "text/mathml": { + "source": "nginx", + "extensions": ["mml"] + }, + "text/mdx": { + "compressible": true, + "extensions": ["mdx"] + }, + "text/mizar": { + "source": "iana" + }, + "text/n3": { + "source": "iana", + "compressible": true, + "extensions": ["n3"] + }, + "text/parameters": { + "source": "iana" + }, + "text/parityfec": { + "source": "iana" + }, + "text/plain": { + "source": "iana", + "compressible": true, + "extensions": ["txt","text","conf","def","list","log","in","ini"] + }, + "text/provenance-notation": { + "source": "iana" + }, + "text/prs.fallenstein.rst": { + "source": "iana" + }, + "text/prs.lines.tag": { + "source": "iana", + "extensions": ["dsc"] + }, + "text/prs.prop.logic": { + "source": "iana" + }, + "text/raptorfec": { + "source": "iana" + }, + "text/red": { + "source": "iana" + }, + "text/rfc822-headers": { + "source": "iana" + }, + "text/richtext": { + "source": "iana", + "compressible": true, + "extensions": ["rtx"] + }, + "text/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + }, + "text/rtp-enc-aescm128": { + "source": "iana" + }, + "text/rtploopback": { + "source": "iana" + }, + "text/rtx": { + "source": "iana" + }, + "text/sgml": { + "source": "iana", + "extensions": ["sgml","sgm"] + }, + "text/shex": { + "extensions": ["shex"] + }, + "text/slim": { + "extensions": ["slim","slm"] + }, + "text/strings": { + "source": "iana" + }, + "text/stylus": { + "extensions": ["stylus","styl"] + }, + "text/t140": { + "source": "iana" + }, + "text/tab-separated-values": { + "source": "iana", + "compressible": true, + "extensions": ["tsv"] + }, + "text/troff": { + "source": "iana", + "extensions": ["t","tr","roff","man","me","ms"] + }, + "text/turtle": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["ttl"] + }, + "text/ulpfec": { + "source": "iana" + }, + "text/uri-list": { + "source": "iana", + "compressible": true, + "extensions": ["uri","uris","urls"] + }, + "text/vcard": { + "source": "iana", + "compressible": true, + "extensions": ["vcard"] + }, + "text/vnd.a": { + "source": "iana" + }, + "text/vnd.abc": { + "source": "iana" + }, + "text/vnd.ascii-art": { + "source": "iana" + }, + "text/vnd.curl": { + "source": "iana", + "extensions": ["curl"] + }, + "text/vnd.curl.dcurl": { + "source": "apache", + "extensions": ["dcurl"] + }, + "text/vnd.curl.mcurl": { + "source": "apache", + "extensions": ["mcurl"] + }, + "text/vnd.curl.scurl": { + "source": "apache", + "extensions": ["scurl"] + }, + "text/vnd.debian.copyright": { + "source": "iana" + }, + "text/vnd.dmclientscript": { + "source": "iana" + }, + "text/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "text/vnd.esmertec.theme-descriptor": { + "source": "iana" + }, + "text/vnd.fly": { + "source": "iana", + "extensions": ["fly"] + }, + "text/vnd.fmi.flexstor": { + "source": "iana", + "extensions": ["flx"] + }, + 
"text/vnd.gml": { + "source": "iana" + }, + "text/vnd.graphviz": { + "source": "iana", + "extensions": ["gv"] + }, + "text/vnd.hgl": { + "source": "iana" + }, + "text/vnd.in3d.3dml": { + "source": "iana", + "extensions": ["3dml"] + }, + "text/vnd.in3d.spot": { + "source": "iana", + "extensions": ["spot"] + }, + "text/vnd.iptc.newsml": { + "source": "iana" + }, + "text/vnd.iptc.nitf": { + "source": "iana" + }, + "text/vnd.latex-z": { + "source": "iana" + }, + "text/vnd.motorola.reflex": { + "source": "iana" + }, + "text/vnd.ms-mediapackage": { + "source": "iana" + }, + "text/vnd.net2phone.commcenter.command": { + "source": "iana" + }, + "text/vnd.radisys.msml-basic-layout": { + "source": "iana" + }, + "text/vnd.senx.warpscript": { + "source": "iana" + }, + "text/vnd.si.uricatalogue": { + "source": "iana" + }, + "text/vnd.sun.j2me.app-descriptor": { + "source": "iana", + "extensions": ["jad"] + }, + "text/vnd.trolltech.linguist": { + "source": "iana" + }, + "text/vnd.wap.si": { + "source": "iana" + }, + "text/vnd.wap.sl": { + "source": "iana" + }, + "text/vnd.wap.wml": { + "source": "iana", + "extensions": ["wml"] + }, + "text/vnd.wap.wmlscript": { + "source": "iana", + "extensions": ["wmls"] + }, + "text/vtt": { + "charset": "UTF-8", + "compressible": true, + "extensions": ["vtt"] + }, + "text/x-asm": { + "source": "apache", + "extensions": ["s","asm"] + }, + "text/x-c": { + "source": "apache", + "extensions": ["c","cc","cxx","cpp","h","hh","dic"] + }, + "text/x-component": { + "source": "nginx", + "extensions": ["htc"] + }, + "text/x-fortran": { + "source": "apache", + "extensions": ["f","for","f77","f90"] + }, + "text/x-gwt-rpc": { + "compressible": true + }, + "text/x-handlebars-template": { + "extensions": ["hbs"] + }, + "text/x-java-source": { + "source": "apache", + "extensions": ["java"] + }, + "text/x-jquery-tmpl": { + "compressible": true + }, + "text/x-lua": { + "extensions": ["lua"] + }, + "text/x-markdown": { + "compressible": true, + "extensions": ["mkd"] + }, + "text/x-nfo": { + "source": "apache", + "extensions": ["nfo"] + }, + "text/x-opml": { + "source": "apache", + "extensions": ["opml"] + }, + "text/x-org": { + "compressible": true, + "extensions": ["org"] + }, + "text/x-pascal": { + "source": "apache", + "extensions": ["p","pas"] + }, + "text/x-processing": { + "compressible": true, + "extensions": ["pde"] + }, + "text/x-sass": { + "extensions": ["sass"] + }, + "text/x-scss": { + "extensions": ["scss"] + }, + "text/x-setext": { + "source": "apache", + "extensions": ["etx"] + }, + "text/x-sfv": { + "source": "apache", + "extensions": ["sfv"] + }, + "text/x-suse-ymp": { + "compressible": true, + "extensions": ["ymp"] + }, + "text/x-uuencode": { + "source": "apache", + "extensions": ["uu"] + }, + "text/x-vcalendar": { + "source": "apache", + "extensions": ["vcs"] + }, + "text/x-vcard": { + "source": "apache", + "extensions": ["vcf"] + }, + "text/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml"] + }, + "text/xml-external-parsed-entity": { + "source": "iana" + }, + "text/yaml": { + "extensions": ["yaml","yml"] + }, + "video/1d-interleaved-parityfec": { + "source": "iana" + }, + "video/3gpp": { + "source": "iana", + "extensions": ["3gp","3gpp"] + }, + "video/3gpp-tt": { + "source": "iana" + }, + "video/3gpp2": { + "source": "iana", + "extensions": ["3g2"] + }, + "video/bmpeg": { + "source": "iana" + }, + "video/bt656": { + "source": "iana" + }, + "video/celb": { + "source": "iana" + }, + "video/dv": { + "source": "iana" + }, + "video/encaprtp": { + 
"source": "iana" + }, + "video/h261": { + "source": "iana", + "extensions": ["h261"] + }, + "video/h263": { + "source": "iana", + "extensions": ["h263"] + }, + "video/h263-1998": { + "source": "iana" + }, + "video/h263-2000": { + "source": "iana" + }, + "video/h264": { + "source": "iana", + "extensions": ["h264"] + }, + "video/h264-rcdo": { + "source": "iana" + }, + "video/h264-svc": { + "source": "iana" + }, + "video/h265": { + "source": "iana" + }, + "video/iso.segment": { + "source": "iana" + }, + "video/jpeg": { + "source": "iana", + "extensions": ["jpgv"] + }, + "video/jpeg2000": { + "source": "iana" + }, + "video/jpm": { + "source": "apache", + "extensions": ["jpm","jpgm"] + }, + "video/mj2": { + "source": "iana", + "extensions": ["mj2","mjp2"] + }, + "video/mp1s": { + "source": "iana" + }, + "video/mp2p": { + "source": "iana" + }, + "video/mp2t": { + "source": "iana", + "extensions": ["ts"] + }, + "video/mp4": { + "source": "iana", + "compressible": false, + "extensions": ["mp4","mp4v","mpg4"] + }, + "video/mp4v-es": { + "source": "iana" + }, + "video/mpeg": { + "source": "iana", + "compressible": false, + "extensions": ["mpeg","mpg","mpe","m1v","m2v"] + }, + "video/mpeg4-generic": { + "source": "iana" + }, + "video/mpv": { + "source": "iana" + }, + "video/nv": { + "source": "iana" + }, + "video/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["ogv"] + }, + "video/parityfec": { + "source": "iana" + }, + "video/pointer": { + "source": "iana" + }, + "video/quicktime": { + "source": "iana", + "compressible": false, + "extensions": ["qt","mov"] + }, + "video/raptorfec": { + "source": "iana" + }, + "video/raw": { + "source": "iana" + }, + "video/rtp-enc-aescm128": { + "source": "iana" + }, + "video/rtploopback": { + "source": "iana" + }, + "video/rtx": { + "source": "iana" + }, + "video/smpte291": { + "source": "iana" + }, + "video/smpte292m": { + "source": "iana" + }, + "video/ulpfec": { + "source": "iana" + }, + "video/vc1": { + "source": "iana" + }, + "video/vc2": { + "source": "iana" + }, + "video/vnd.cctv": { + "source": "iana" + }, + "video/vnd.dece.hd": { + "source": "iana", + "extensions": ["uvh","uvvh"] + }, + "video/vnd.dece.mobile": { + "source": "iana", + "extensions": ["uvm","uvvm"] + }, + "video/vnd.dece.mp4": { + "source": "iana" + }, + "video/vnd.dece.pd": { + "source": "iana", + "extensions": ["uvp","uvvp"] + }, + "video/vnd.dece.sd": { + "source": "iana", + "extensions": ["uvs","uvvs"] + }, + "video/vnd.dece.video": { + "source": "iana", + "extensions": ["uvv","uvvv"] + }, + "video/vnd.directv.mpeg": { + "source": "iana" + }, + "video/vnd.directv.mpeg-tts": { + "source": "iana" + }, + "video/vnd.dlna.mpeg-tts": { + "source": "iana" + }, + "video/vnd.dvb.file": { + "source": "iana", + "extensions": ["dvb"] + }, + "video/vnd.fvt": { + "source": "iana", + "extensions": ["fvt"] + }, + "video/vnd.hns.video": { + "source": "iana" + }, + "video/vnd.iptvforum.1dparityfec-1010": { + "source": "iana" + }, + "video/vnd.iptvforum.1dparityfec-2005": { + "source": "iana" + }, + "video/vnd.iptvforum.2dparityfec-1010": { + "source": "iana" + }, + "video/vnd.iptvforum.2dparityfec-2005": { + "source": "iana" + }, + "video/vnd.iptvforum.ttsavc": { + "source": "iana" + }, + "video/vnd.iptvforum.ttsmpeg2": { + "source": "iana" + }, + "video/vnd.motorola.video": { + "source": "iana" + }, + "video/vnd.motorola.videop": { + "source": "iana" + }, + "video/vnd.mpegurl": { + "source": "iana", + "extensions": ["mxu","m4u"] + }, + "video/vnd.ms-playready.media.pyv": { + 
"source": "iana", + "extensions": ["pyv"] + }, + "video/vnd.nokia.interleaved-multimedia": { + "source": "iana" + }, + "video/vnd.nokia.mp4vr": { + "source": "iana" + }, + "video/vnd.nokia.videovoip": { + "source": "iana" + }, + "video/vnd.objectvideo": { + "source": "iana" + }, + "video/vnd.radgamettools.bink": { + "source": "iana" + }, + "video/vnd.radgamettools.smacker": { + "source": "iana" + }, + "video/vnd.sealed.mpeg1": { + "source": "iana" + }, + "video/vnd.sealed.mpeg4": { + "source": "iana" + }, + "video/vnd.sealed.swf": { + "source": "iana" + }, + "video/vnd.sealedmedia.softseal.mov": { + "source": "iana" + }, + "video/vnd.uvvu.mp4": { + "source": "iana", + "extensions": ["uvu","uvvu"] + }, + "video/vnd.vivo": { + "source": "iana", + "extensions": ["viv"] + }, + "video/vp8": { + "source": "iana" + }, + "video/webm": { + "source": "apache", + "compressible": false, + "extensions": ["webm"] + }, + "video/x-f4v": { + "source": "apache", + "extensions": ["f4v"] + }, + "video/x-fli": { + "source": "apache", + "extensions": ["fli"] + }, + "video/x-flv": { + "source": "apache", + "compressible": false, + "extensions": ["flv"] + }, + "video/x-m4v": { + "source": "apache", + "extensions": ["m4v"] + }, + "video/x-matroska": { + "source": "apache", + "compressible": false, + "extensions": ["mkv","mk3d","mks"] + }, + "video/x-mng": { + "source": "apache", + "extensions": ["mng"] + }, + "video/x-ms-asf": { + "source": "apache", + "extensions": ["asf","asx"] + }, + "video/x-ms-vob": { + "source": "apache", + "extensions": ["vob"] + }, + "video/x-ms-wm": { + "source": "apache", + "extensions": ["wm"] + }, + "video/x-ms-wmv": { + "source": "apache", + "compressible": false, + "extensions": ["wmv"] + }, + "video/x-ms-wmx": { + "source": "apache", + "extensions": ["wmx"] + }, + "video/x-ms-wvx": { + "source": "apache", + "extensions": ["wvx"] + }, + "video/x-msvideo": { + "source": "apache", + "extensions": ["avi"] + }, + "video/x-sgi-movie": { + "source": "apache", + "extensions": ["movie"] + }, + "video/x-smv": { + "source": "apache", + "extensions": ["smv"] + }, + "x-conference/x-cooltalk": { + "source": "apache", + "extensions": ["ice"] + }, + "x-shader/x-fragment": { + "compressible": true + }, + "x-shader/x-vertex": { + "compressible": true + } +} diff --git a/node_modules/mime-db/index.js b/node_modules/mime-db/index.js new file mode 100644 index 00000000..551031f6 --- /dev/null +++ b/node_modules/mime-db/index.js @@ -0,0 +1,11 @@ +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * MIT Licensed + */ + +/** + * Module exports. 
+ */ + +module.exports = require('./db.json') diff --git a/node_modules/mime-db/package.json b/node_modules/mime-db/package.json new file mode 100644 index 00000000..07db1ecc --- /dev/null +++ b/node_modules/mime-db/package.json @@ -0,0 +1,58 @@ +{ + "name": "mime-db", + "description": "Media Type Database", + "version": "1.40.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)", + "Robert Kieffer (http://github.com/broofa)" + ], + "license": "MIT", + "keywords": [ + "mime", + "db", + "type", + "types", + "database", + "charset", + "charsets" + ], + "repository": "jshttp/mime-db", + "devDependencies": { + "bluebird": "3.5.4", + "co": "4.6.0", + "cogent": "1.0.1", + "csv-parse": "4.3.4", + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "gnode": "0.1.2", + "mocha": "6.1.4", + "nyc": "14.0.0", + "raw-body": "2.3.3", + "stream-to-array": "2.3.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "db.json", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "build": "node scripts/build", + "fetch": "node scripts/fetch-apache && gnode scripts/fetch-iana && node scripts/fetch-nginx", + "lint": "eslint .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "test-travis": "nyc --reporter=text npm test", + "update": "npm run fetch && npm run build", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/node_modules/mime-types/HISTORY.md b/node_modules/mime-types/HISTORY.md new file mode 100644 index 00000000..aef15bb3 --- /dev/null +++ b/node_modules/mime-types/HISTORY.md @@ -0,0 +1,308 @@ +2.1.24 / 2019-04-20 +=================== + + * deps: mime-db@1.40.0 + - Add extensions from IANA for `model/*` types + - Add `text/mdx` with extension `.mdx` + +2.1.23 / 2019-04-17 +=================== + + * deps: mime-db@~1.39.0 + - Add extensions `.siv` and `.sieve` to `application/sieve` + - Add new upstream MIME types + +2.1.22 / 2019-02-14 +=================== + + * deps: mime-db@~1.38.0 + - Add extension `.nq` to `application/n-quads` + - Add extension `.nt` to `application/n-triples` + - Add new upstream MIME types + - Mark `text/less` as compressible + +2.1.21 / 2018-10-19 +=================== + + * deps: mime-db@~1.37.0 + - Add extensions to HEIC image types + - Add new upstream MIME types + +2.1.20 / 2018-08-26 +=================== + + * deps: mime-db@~1.36.0 + - Add Apple file extensions from IANA + - Add extensions from IANA for `image/*` types + - Add new upstream MIME types + +2.1.19 / 2018-07-17 +=================== + + * deps: mime-db@~1.35.0 + - Add extension `.csl` to `application/vnd.citationstyles.style+xml` + - Add extension `.es` to `application/ecmascript` + - Add extension `.owl` to `application/rdf+xml` + - Add new upstream MIME types + - Add UTF-8 as default charset for `text/turtle` + +2.1.18 / 2018-02-16 +=================== + + * deps: mime-db@~1.33.0 + - Add `application/raml+yaml` with extension `.raml` + - Add `application/wasm` with extension `.wasm` + - Add `text/shex` with extension `.shex` + - Add extensions for JPEG-2000 images + - Add extensions from IANA for `message/*` types + - Add new upstream MIME types + - Update font MIME types + - Update `text/hjson` to registered `application/hjson` + +2.1.17 / 2017-09-01 +=================== 
+ + * deps: mime-db@~1.30.0 + - Add `application/vnd.ms-outlook` + - Add `application/x-arj` + - Add extension `.mjs` to `application/javascript` + - Add glTF types and extensions + - Add new upstream MIME types + - Add `text/x-org` + - Add VirtualBox MIME types + - Fix `source` records for `video/*` types that are IANA + - Update `font/opentype` to registered `font/otf` + +2.1.16 / 2017-07-24 +=================== + + * deps: mime-db@~1.29.0 + - Add `application/fido.trusted-apps+json` + - Add extension `.wadl` to `application/vnd.sun.wadl+xml` + - Add extension `.gz` to `application/gzip` + - Add new upstream MIME types + - Update extensions `.md` and `.markdown` to be `text/markdown` + +2.1.15 / 2017-03-23 +=================== + + * deps: mime-db@~1.27.0 + - Add new mime types + - Add `image/apng` + +2.1.14 / 2017-01-14 +=================== + + * deps: mime-db@~1.26.0 + - Add new mime types + +2.1.13 / 2016-11-18 +=================== + + * deps: mime-db@~1.25.0 + - Add new mime types + +2.1.12 / 2016-09-18 +=================== + + * deps: mime-db@~1.24.0 + - Add new mime types + - Add `audio/mp3` + +2.1.11 / 2016-05-01 +=================== + + * deps: mime-db@~1.23.0 + - Add new mime types + +2.1.10 / 2016-02-15 +=================== + + * deps: mime-db@~1.22.0 + - Add new mime types + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + +2.1.9 / 2016-01-06 +================== + + * deps: mime-db@~1.21.0 + - Add new mime types + +2.1.8 / 2015-11-30 +================== + + * deps: mime-db@~1.20.0 + - Add new mime types + +2.1.7 / 2015-09-20 +================== + + * deps: mime-db@~1.19.0 + - Add new mime types + +2.1.6 / 2015-09-03 +================== + + * deps: mime-db@~1.18.0 + - Add new mime types + +2.1.5 / 2015-08-20 +================== + + * deps: mime-db@~1.17.0 + - Add new mime types + +2.1.4 / 2015-07-30 +================== + + * deps: mime-db@~1.16.0 + - Add new mime types + +2.1.3 / 2015-07-13 +================== + + * deps: mime-db@~1.15.0 + - Add new mime types + +2.1.2 / 2015-06-25 +================== + + * deps: mime-db@~1.14.0 + - Add new mime types + +2.1.1 / 2015-06-08 +================== + + * perf: fix deopt during mapping + +2.1.0 / 2015-06-07 +================== + + * Fix incorrectly treating extension-less file name as extension + - i.e. 
`'path/to/json'` will no longer return `application/json` + * Fix `.charset(type)` to accept parameters + * Fix `.charset(type)` to match case-insensitive + * Improve generation of extension to MIME mapping + * Refactor internals for readability and no argument reassignment + * Prefer `application/*` MIME types from the same source + * Prefer any type over `application/octet-stream` + * deps: mime-db@~1.13.0 + - Add nginx as a source + - Add new mime types + +2.0.14 / 2015-06-06 +=================== + + * deps: mime-db@~1.12.0 + - Add new mime types + +2.0.13 / 2015-05-31 +=================== + + * deps: mime-db@~1.11.0 + - Add new mime types + +2.0.12 / 2015-05-19 +=================== + + * deps: mime-db@~1.10.0 + - Add new mime types + +2.0.11 / 2015-05-05 +=================== + + * deps: mime-db@~1.9.1 + - Add new mime types + +2.0.10 / 2015-03-13 +=================== + + * deps: mime-db@~1.8.0 + - Add new mime types + +2.0.9 / 2015-02-09 +================== + + * deps: mime-db@~1.7.0 + - Add new mime types + - Community extensions ownership transferred from `node-mime` + +2.0.8 / 2015-01-29 +================== + + * deps: mime-db@~1.6.0 + - Add new mime types + +2.0.7 / 2014-12-30 +================== + + * deps: mime-db@~1.5.0 + - Add new mime types + - Fix various invalid MIME type entries + +2.0.6 / 2014-12-30 +================== + + * deps: mime-db@~1.4.0 + - Add new mime types + - Fix various invalid MIME type entries + - Remove example template MIME types + +2.0.5 / 2014-12-29 +================== + + * deps: mime-db@~1.3.1 + - Fix missing extensions + +2.0.4 / 2014-12-10 +================== + + * deps: mime-db@~1.3.0 + - Add new mime types + +2.0.3 / 2014-11-09 +================== + + * deps: mime-db@~1.2.0 + - Add new mime types + +2.0.2 / 2014-09-28 +================== + + * deps: mime-db@~1.1.0 + - Add new mime types + - Add additional compressible + - Update charsets + +2.0.1 / 2014-09-07 +================== + + * Support Node.js 0.6 + +2.0.0 / 2014-09-02 +================== + + * Use `mime-db` + * Remove `.define()` + +1.0.2 / 2014-08-04 +================== + + * Set charset=utf-8 for `text/javascript` + +1.0.1 / 2014-06-24 +================== + + * Add `text/jsx` type + +1.0.0 / 2014-05-12 +================== + + * Return `false` for unknown types + * Set charset=utf-8 for `application/json` + +0.1.0 / 2014-05-02 +================== + + * Initial release diff --git a/node_modules/mime-types/LICENSE b/node_modules/mime-types/LICENSE new file mode 100644 index 00000000..06166077 --- /dev/null +++ b/node_modules/mime-types/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mime-types/README.md b/node_modules/mime-types/README.md new file mode 100644 index 00000000..1dbef2b5 --- /dev/null +++ b/node_modules/mime-types/README.md @@ -0,0 +1,113 @@ +# mime-types + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +The ultimate javascript content-type utility. + +Similar to [the `mime@1.x` module](https://www.npmjs.com/package/mime), except: + +- __No fallbacks.__ Instead of naively returning the first available type, + `mime-types` simply returns `false`, so do + `var type = mime.lookup('unrecognized') || 'application/octet-stream'`. +- No `new Mime()` business, so you could do `var lookup = require('mime-types').lookup`. +- No `.define()` functionality +- Bug fixes for `.lookup(path)` + +Otherwise, the API is compatible with `mime` 1.x. + +## Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install mime-types +``` + +## Adding Types + +All mime types are based on [mime-db](https://www.npmjs.com/package/mime-db), +so open a PR there if you'd like to add mime types. + +## API + +```js +var mime = require('mime-types') +``` + +All functions return `false` if input is invalid or not found. + +### mime.lookup(path) + +Lookup the content-type associated with a file. + +```js +mime.lookup('json') // 'application/json' +mime.lookup('.md') // 'text/markdown' +mime.lookup('file.html') // 'text/html' +mime.lookup('folder/file.js') // 'application/javascript' +mime.lookup('folder/.htaccess') // false + +mime.lookup('cats') // false +``` + +### mime.contentType(type) + +Create a full content-type header given a content-type or extension. +When given an extension, `mime.lookup` is used to get the matching +content-type, otherwise the given content-type is used. Then if the +content-type does not already have a `charset` parameter, `mime.charset` +is used to get the default charset and add to the returned content-type. + +```js +mime.contentType('markdown') // 'text/x-markdown; charset=utf-8' +mime.contentType('file.json') // 'application/json; charset=utf-8' +mime.contentType('text/html') // 'text/html; charset=utf-8' +mime.contentType('text/html; charset=iso-8859-1') // 'text/html; charset=iso-8859-1' + +// from a full path +mime.contentType(path.extname('/path/to/file.json')) // 'application/json; charset=utf-8' +``` + +### mime.extension(type) + +Get the default extension for a content-type. + +```js +mime.extension('application/octet-stream') // 'bin' +``` + +### mime.charset(type) + +Lookup the implied default charset of a content-type. + +```js +mime.charset('text/markdown') // 'UTF-8' +``` + +### var type = mime.types[extension] + +A map of content-types by extension. + +### [extensions...] = mime.extensions[type] + +A map of extensions by content-type. 
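
As a rough sketch of how the two exported maps described above can be inspected directly (the concrete values shown are assumptions based on the `mime-db` 1.40.0 data bundled in this patch, not output captured from it):

```js
var mime = require('mime-types')

// extension -> content-type (keys are lower-case extensions)
mime.types['json']                    // e.g. 'application/json'

// content-type -> extensions (first entry is the preferred extension;
// exact values depend on the bundled mime-db version)
mime.extensions['application/json']   // e.g. ['json', 'map']
```
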
+ +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-types/master +[coveralls-url]: https://coveralls.io/r/jshttp/mime-types?branch=master +[node-version-image]: https://badgen.net/npm/node/mime-types +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/mime-types +[npm-url]: https://npmjs.org/package/mime-types +[npm-version-image]: https://badgen.net/npm/v/mime-types +[travis-image]: https://badgen.net/travis/jshttp/mime-types/master +[travis-url]: https://travis-ci.org/jshttp/mime-types diff --git a/node_modules/mime-types/index.js b/node_modules/mime-types/index.js new file mode 100644 index 00000000..b9f34d59 --- /dev/null +++ b/node_modules/mime-types/index.js @@ -0,0 +1,188 @@ +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var db = require('mime-db') +var extname = require('path').extname + +/** + * Module variables. + * @private + */ + +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i + +/** + * Module exports. + * @public + */ + +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) + +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) + +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] + + if (mime && mime.charset) { + return mime.charset + } + + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } + + return false +} + +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ + +function contentType (str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } + + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str + + if (!mime) { + return false + } + + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } + + return mime +} + +/** + * Get the default extension for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] +} + +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ + +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } + + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' 
+ path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} + +/** + * Populate the extensions and types maps. + * @private + */ + +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] + + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions + + if (!exts || !exts.length) { + return + } + + // mime -> extensions + extensions[type] = exts + + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] + + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) + + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue + } + } + + // set the extension -> mime + types[extension] = type + } + }) +} diff --git a/node_modules/mime-types/package.json b/node_modules/mime-types/package.json new file mode 100644 index 00000000..914b39cc --- /dev/null +++ b/node_modules/mime-types/package.json @@ -0,0 +1,43 @@ +{ + "name": "mime-types", + "description": "The ultimate javascript content-type utility.", + "version": "2.1.24", + "contributors": [ + "Douglas Christopher Wilson ", + "Jeremiah Senkpiel (https://searchbeam.jit.su)", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "keywords": [ + "mime", + "types" + ], + "repository": "jshttp/mime-types", + "dependencies": { + "mime-db": "1.40.0" + }, + "devDependencies": { + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.17.2", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "mocha": "6.1.4", + "nyc": "14.0.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec test/test.js", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "test-travis": "nyc --reporter=text npm test" + } +} diff --git a/node_modules/mime/.npmignore b/node_modules/mime/.npmignore new file mode 100644 index 00000000..e69de29b diff --git a/node_modules/mime/CHANGELOG.md b/node_modules/mime/CHANGELOG.md new file mode 100644 index 00000000..f1275350 --- /dev/null +++ b/node_modules/mime/CHANGELOG.md @@ -0,0 +1,164 @@ +# Changelog + +## v1.6.0 (24/11/2017) +*No changelog for this release.* + +--- + +## v2.0.4 (24/11/2017) +- [**closed**] Switch to mime-score module for resolving extension contention issues. [#182](https://github.com/broofa/node-mime/issues/182) +- [**closed**] Update mime-db to 1.31.0 in v1.x branch [#181](https://github.com/broofa/node-mime/issues/181) + +--- + +## v1.5.0 (22/11/2017) +- [**closed**] need ES5 version ready in npm package [#179](https://github.com/broofa/node-mime/issues/179) +- [**closed**] mime-db no trace of iWork - pages / numbers / etc. [#178](https://github.com/broofa/node-mime/issues/178) +- [**closed**] How it works in brownser ? 
[#176](https://github.com/broofa/node-mime/issues/176) +- [**closed**] Missing `./Mime` [#175](https://github.com/broofa/node-mime/issues/175) +- [**closed**] Vulnerable Regular Expression [#167](https://github.com/broofa/node-mime/issues/167) + +--- + +## v2.0.3 (25/09/2017) +*No changelog for this release.* + +--- + +## v1.4.1 (25/09/2017) +- [**closed**] Issue when bundling with webpack [#172](https://github.com/broofa/node-mime/issues/172) + +--- + +## v2.0.2 (15/09/2017) +- [**V2**] fs.readFileSync is not a function [#165](https://github.com/broofa/node-mime/issues/165) +- [**closed**] The extension for video/quicktime should map to .mov, not .qt [#164](https://github.com/broofa/node-mime/issues/164) +- [**V2**] [v2 Feedback request] Mime class API [#163](https://github.com/broofa/node-mime/issues/163) +- [**V2**] [v2 Feedback request] Resolving conflicts over extensions [#162](https://github.com/broofa/node-mime/issues/162) +- [**V2**] Allow callers to load module with official, full, or no defined types. [#161](https://github.com/broofa/node-mime/issues/161) +- [**V2**] Use "facets" to resolve extension conflicts [#160](https://github.com/broofa/node-mime/issues/160) +- [**V2**] Remove fs and path dependencies [#152](https://github.com/broofa/node-mime/issues/152) +- [**V2**] Default content-type should not be application/octet-stream [#139](https://github.com/broofa/node-mime/issues/139) +- [**V2**] reset mime-types [#124](https://github.com/broofa/node-mime/issues/124) +- [**V2**] Extensionless paths should return null or false [#113](https://github.com/broofa/node-mime/issues/113) + +--- + +## v2.0.1 (14/09/2017) +- [**closed**] Changelog for v2.0 does not mention breaking changes [#171](https://github.com/broofa/node-mime/issues/171) +- [**closed**] MIME breaking with 'class' declaration as it is without 'use strict mode' [#170](https://github.com/broofa/node-mime/issues/170) + +--- + +## v2.0.0 (12/09/2017) +- [**closed**] woff and woff2 [#168](https://github.com/broofa/node-mime/issues/168) + +--- + +## v1.4.0 (28/08/2017) +- [**closed**] support for ac3 voc files [#159](https://github.com/broofa/node-mime/issues/159) +- [**closed**] Help understanding change from application/xml to text/xml [#158](https://github.com/broofa/node-mime/issues/158) +- [**closed**] no longer able to override mimetype [#157](https://github.com/broofa/node-mime/issues/157) +- [**closed**] application/vnd.adobe.photoshop [#147](https://github.com/broofa/node-mime/issues/147) +- [**closed**] Directories should appear as something other than application/octet-stream [#135](https://github.com/broofa/node-mime/issues/135) +- [**closed**] requested features [#131](https://github.com/broofa/node-mime/issues/131) +- [**closed**] Make types.json loading optional? 
[#129](https://github.com/broofa/node-mime/issues/129) +- [**closed**] Cannot find module './types.json' [#120](https://github.com/broofa/node-mime/issues/120) +- [**V2**] .wav files show up as "audio/x-wav" instead of "audio/x-wave" [#118](https://github.com/broofa/node-mime/issues/118) +- [**closed**] Don't be a pain in the ass for node community [#108](https://github.com/broofa/node-mime/issues/108) +- [**closed**] don't make default_type global [#78](https://github.com/broofa/node-mime/issues/78) +- [**closed**] mime.extension() fails if the content-type is parameterized [#74](https://github.com/broofa/node-mime/issues/74) + +--- + +## v1.3.6 (11/05/2017) +- [**closed**] .md should be text/markdown as of March 2016 [#154](https://github.com/broofa/node-mime/issues/154) +- [**closed**] Error while installing mime [#153](https://github.com/broofa/node-mime/issues/153) +- [**closed**] application/manifest+json [#149](https://github.com/broofa/node-mime/issues/149) +- [**closed**] Dynamic adaptive streaming over HTTP (DASH) file extension typo [#141](https://github.com/broofa/node-mime/issues/141) +- [**closed**] charsets image/png undefined [#140](https://github.com/broofa/node-mime/issues/140) +- [**closed**] Mime-db dependency out of date [#130](https://github.com/broofa/node-mime/issues/130) +- [**closed**] how to support plist? [#126](https://github.com/broofa/node-mime/issues/126) +- [**closed**] how does .types file format look like? [#123](https://github.com/broofa/node-mime/issues/123) +- [**closed**] Feature: support for expanding MIME patterns [#121](https://github.com/broofa/node-mime/issues/121) +- [**closed**] DEBUG_MIME doesn't work [#117](https://github.com/broofa/node-mime/issues/117) + +--- + +## v1.3.4 (06/02/2015) +*No changelog for this release.* + +--- + +## v1.3.3 (06/02/2015) +*No changelog for this release.* + +--- + +## v1.3.1 (05/02/2015) +- [**closed**] Consider adding support for Handlebars .hbs file ending [#111](https://github.com/broofa/node-mime/issues/111) +- [**closed**] Consider adding support for hjson. [#110](https://github.com/broofa/node-mime/issues/110) +- [**closed**] Add mime type for Opus audio files [#94](https://github.com/broofa/node-mime/issues/94) +- [**closed**] Consider making the `Requesting New Types` information more visible [#77](https://github.com/broofa/node-mime/issues/77) + +--- + +## v1.3.0 (05/02/2015) +- [**closed**] Add common name? [#114](https://github.com/broofa/node-mime/issues/114) +- [**closed**] application/x-yaml [#104](https://github.com/broofa/node-mime/issues/104) +- [**closed**] Add mime type for WOFF file format 2.0 [#102](https://github.com/broofa/node-mime/issues/102) +- [**closed**] application/x-msi for .msi [#99](https://github.com/broofa/node-mime/issues/99) +- [**closed**] Add mimetype for gettext translation files [#98](https://github.com/broofa/node-mime/issues/98) +- [**closed**] collaborators [#88](https://github.com/broofa/node-mime/issues/88) +- [**closed**] getting errot in installation of mime module...any1 can help? [#87](https://github.com/broofa/node-mime/issues/87) +- [**closed**] should application/json's charset be utf8? 
[#86](https://github.com/broofa/node-mime/issues/86) +- [**closed**] Add "license" and "licenses" to package.json [#81](https://github.com/broofa/node-mime/issues/81) +- [**closed**] lookup with extension-less file on Windows returns wrong type [#68](https://github.com/broofa/node-mime/issues/68) + +--- + +## v1.2.11 (15/08/2013) +- [**closed**] Update mime.types [#65](https://github.com/broofa/node-mime/issues/65) +- [**closed**] Publish a new version [#63](https://github.com/broofa/node-mime/issues/63) +- [**closed**] README should state upfront that "application/octet-stream" is default for unknown extension [#55](https://github.com/broofa/node-mime/issues/55) +- [**closed**] Suggested improvement to the charset API [#52](https://github.com/broofa/node-mime/issues/52) + +--- + +## v1.2.10 (25/07/2013) +- [**closed**] Mime type for woff files should be application/font-woff and not application/x-font-woff [#62](https://github.com/broofa/node-mime/issues/62) +- [**closed**] node.types in conflict with mime.types [#51](https://github.com/broofa/node-mime/issues/51) + +--- + +## v1.2.9 (17/01/2013) +- [**closed**] Please update "mime" NPM [#49](https://github.com/broofa/node-mime/issues/49) +- [**closed**] Please add semicolon [#46](https://github.com/broofa/node-mime/issues/46) +- [**closed**] parse full mime types [#43](https://github.com/broofa/node-mime/issues/43) + +--- + +## v1.2.8 (10/01/2013) +- [**closed**] /js directory mime is application/javascript. Is it correct? [#47](https://github.com/broofa/node-mime/issues/47) +- [**closed**] Add mime types for lua code. [#45](https://github.com/broofa/node-mime/issues/45) + +--- + +## v1.2.7 (19/10/2012) +- [**closed**] cannot install 1.2.7 via npm [#41](https://github.com/broofa/node-mime/issues/41) +- [**closed**] Transfer ownership to @broofa [#36](https://github.com/broofa/node-mime/issues/36) +- [**closed**] it's wrong to set charset to UTF-8 for text [#30](https://github.com/broofa/node-mime/issues/30) +- [**closed**] Allow multiple instances of MIME types container [#27](https://github.com/broofa/node-mime/issues/27) + +--- + +## v1.2.5 (16/02/2012) +- [**closed**] When looking up a types, check hasOwnProperty [#23](https://github.com/broofa/node-mime/issues/23) +- [**closed**] Bump version to 1.2.2 [#18](https://github.com/broofa/node-mime/issues/18) +- [**closed**] No license [#16](https://github.com/broofa/node-mime/issues/16) +- [**closed**] Some types missing that are used by html5/css3 [#13](https://github.com/broofa/node-mime/issues/13) +- [**closed**] npm install fails for 1.2.1 [#12](https://github.com/broofa/node-mime/issues/12) +- [**closed**] image/pjpeg + image/x-png [#10](https://github.com/broofa/node-mime/issues/10) +- [**closed**] symlink [#8](https://github.com/broofa/node-mime/issues/8) +- [**closed**] gzip [#2](https://github.com/broofa/node-mime/issues/2) +- [**closed**] ALL CAPS filenames return incorrect mime type [#1](https://github.com/broofa/node-mime/issues/1) diff --git a/node_modules/mime/LICENSE b/node_modules/mime/LICENSE new file mode 100644 index 00000000..d3f46f7e --- /dev/null +++ b/node_modules/mime/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010 Benjamin Thomas, Robert Kieffer + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/mime/README.md b/node_modules/mime/README.md new file mode 100644 index 00000000..506fbe55 --- /dev/null +++ b/node_modules/mime/README.md @@ -0,0 +1,90 @@ +# mime + +Comprehensive MIME type mapping API based on mime-db module. + +## Install + +Install with [npm](http://github.com/isaacs/npm): + + npm install mime + +## Contributing / Testing + + npm run test + +## Command Line + + mime [path_string] + +E.g. + + > mime scripts/jquery.js + application/javascript + +## API - Queries + +### mime.lookup(path) +Get the mime type associated with a file, if no mime type is found `application/octet-stream` is returned. Performs a case-insensitive lookup using the extension in `path` (the substring after the last '/' or '.'). E.g. + +```js +var mime = require('mime'); + +mime.lookup('/path/to/file.txt'); // => 'text/plain' +mime.lookup('file.txt'); // => 'text/plain' +mime.lookup('.TXT'); // => 'text/plain' +mime.lookup('htm'); // => 'text/html' +``` + +### mime.default_type +Sets the mime type returned when `mime.lookup` fails to find the extension searched for. (Default is `application/octet-stream`.) + +### mime.extension(type) +Get the default extension for `type` + +```js +mime.extension('text/html'); // => 'html' +mime.extension('application/octet-stream'); // => 'bin' +``` + +### mime.charsets.lookup() + +Map mime-type to charset + +```js +mime.charsets.lookup('text/plain'); // => 'UTF-8' +``` + +(The logic for charset lookups is pretty rudimentary. Feel free to suggest improvements.) + +## API - Defining Custom Types + +Custom type mappings can be added on a per-project basis via the following APIs. + +### mime.define() + +Add custom mime/extension mappings + +```js +mime.define({ + 'text/x-some-format': ['x-sf', 'x-sft', 'x-sfml'], + 'application/x-my-type': ['x-mt', 'x-mtt'], + // etc ... +}); + +mime.lookup('x-sft'); // => 'text/x-some-format' +``` + +The first entry in the extensions array is returned by `mime.extension()`. E.g. + +```js +mime.extension('text/x-some-format'); // => 'x-sf' +``` + +### mime.load(filepath) + +Load mappings from an Apache ".types" format file + +```js +mime.load('./my_project.types'); +``` +The .types file format is simple - See the `types` dir for examples. 
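
As a rough sketch of how `mime.load()` ties into the lookup API (the file name and contents below are illustrative only, not part of the patch):

```js
var fs = require('fs');
var mime = require('mime');

// Apache ".types" format: one MIME type per line, followed by its extensions.
fs.writeFileSync('./my_project.types', 'text/x-some-format x-sf x-sft\n');

mime.load('./my_project.types');

mime.lookup('report.x-sf');             // => 'text/x-some-format'
mime.extension('text/x-some-format');   // => 'x-sf' (first listed extension)
```
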
diff --git a/node_modules/mime/cli.js b/node_modules/mime/cli.js new file mode 100755 index 00000000..20b1ffeb --- /dev/null +++ b/node_modules/mime/cli.js @@ -0,0 +1,8 @@ +#!/usr/bin/env node + +var mime = require('./mime.js'); +var file = process.argv[2]; +var type = mime.lookup(file); + +process.stdout.write(type + '\n'); + diff --git a/node_modules/mime/mime.js b/node_modules/mime/mime.js new file mode 100644 index 00000000..d7efbde7 --- /dev/null +++ b/node_modules/mime/mime.js @@ -0,0 +1,108 @@ +var path = require('path'); +var fs = require('fs'); + +function Mime() { + // Map of extension -> mime type + this.types = Object.create(null); + + // Map of mime type -> extension + this.extensions = Object.create(null); +} + +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * @param map (Object) type definitions + */ +Mime.prototype.define = function (map) { + for (var type in map) { + var exts = map[type]; + for (var i = 0; i < exts.length; i++) { + if (process.env.DEBUG_MIME && this.types[exts[i]]) { + console.warn((this._loading || "define()").replace(/.*\//, ''), 'changes "' + exts[i] + '" extension type from ' + + this.types[exts[i]] + ' to ' + type); + } + + this.types[exts[i]] = type; + } + + // Default extension is the first one we encounter + if (!this.extensions[type]) { + this.extensions[type] = exts[0]; + } + } +}; + +/** + * Load an Apache2-style ".types" file + * + * This may be called multiple times (it's expected). Where files declare + * overlapping types/extensions, the last file wins. + * + * @param file (String) path of file to load. + */ +Mime.prototype.load = function(file) { + this._loading = file; + // Read file and split into lines + var map = {}, + content = fs.readFileSync(file, 'ascii'), + lines = content.split(/[\r\n]+/); + + lines.forEach(function(line) { + // Clean up whitespace/comments, and split into fields + var fields = line.replace(/\s*#.*|^\s*|\s*$/g, '').split(/\s+/); + map[fields.shift()] = fields; + }); + + this.define(map); + + this._loading = null; +}; + +/** + * Lookup a mime type based on extension + */ +Mime.prototype.lookup = function(path, fallback) { + var ext = path.replace(/^.*[\.\/\\]/, '').toLowerCase(); + + return this.types[ext] || fallback || this.default_type; +}; + +/** + * Return file extension associated with a mime type + */ +Mime.prototype.extension = function(mimeType) { + var type = mimeType.match(/^\s*([^;\s]*)(?:;|\s|$)/)[1].toLowerCase(); + return this.extensions[type]; +}; + +// Default instance +var mime = new Mime(); + +// Define built-in types +mime.define(require('./types.json')); + +// Default type +mime.default_type = mime.lookup('bin'); + +// +// Additional API specific to the default instance +// + +mime.Mime = Mime; + +/** + * Lookup a charset based on mime type. + */ +mime.charsets = { + lookup: function(mimeType, fallback) { + // Assume text types are utf8 + return (/^text\/|^application\/(javascript|json)/).test(mimeType) ? 
'UTF-8' : fallback; + } +}; + +module.exports = mime; diff --git a/node_modules/mime/package.json b/node_modules/mime/package.json new file mode 100644 index 00000000..6bd24bc5 --- /dev/null +++ b/node_modules/mime/package.json @@ -0,0 +1,44 @@ +{ + "author": { + "name": "Robert Kieffer", + "url": "http://github.com/broofa", + "email": "robert@broofa.com" + }, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + }, + "contributors": [ + { + "name": "Benjamin Thomas", + "url": "http://github.com/bentomas", + "email": "benjamin@benjaminthomas.org" + } + ], + "description": "A comprehensive library for mime-type mapping", + "license": "MIT", + "dependencies": {}, + "devDependencies": { + "github-release-notes": "0.13.1", + "mime-db": "1.31.0", + "mime-score": "1.1.0" + }, + "scripts": { + "prepare": "node src/build.js", + "changelog": "gren changelog --tags=all --generate --override", + "test": "node src/test.js" + }, + "keywords": [ + "util", + "mime" + ], + "main": "mime.js", + "name": "mime", + "repository": { + "url": "https://github.com/broofa/node-mime", + "type": "git" + }, + "version": "1.6.0" +} diff --git a/node_modules/mime/src/build.js b/node_modules/mime/src/build.js new file mode 100755 index 00000000..4928e48b --- /dev/null +++ b/node_modules/mime/src/build.js @@ -0,0 +1,53 @@ +#!/usr/bin/env node + +'use strict'; + +const fs = require('fs'); +const path = require('path'); +const mimeScore = require('mime-score'); + +let db = require('mime-db'); +let chalk = require('chalk'); + +const STANDARD_FACET_SCORE = 900; + +const byExtension = {}; + +// Clear out any conflict extensions in mime-db +for (let type in db) { + let entry = db[type]; + entry.type = type; + + if (!entry.extensions) continue; + + entry.extensions.forEach(ext => { + if (ext in byExtension) { + const e0 = entry; + const e1 = byExtension[ext]; + e0.pri = mimeScore(e0.type, e0.source); + e1.pri = mimeScore(e1.type, e1.source); + + let drop = e0.pri < e1.pri ? e0 : e1; + let keep = e0.pri >= e1.pri ? 
e0 : e1; + drop.extensions = drop.extensions.filter(e => e !== ext); + + console.log(`${ext}: Keeping ${chalk.green(keep.type)} (${keep.pri}), dropping ${chalk.red(drop.type)} (${drop.pri})`); + } + byExtension[ext] = entry; + }); +} + +function writeTypesFile(types, path) { + fs.writeFileSync(path, JSON.stringify(types)); +} + +// Segregate into standard and non-standard types based on facet per +// https://tools.ietf.org/html/rfc6838#section-3.1 +const types = {}; + +Object.keys(db).sort().forEach(k => { + const entry = db[k]; + types[entry.type] = entry.extensions; +}); + +writeTypesFile(types, path.join(__dirname, '..', 'types.json')); diff --git a/node_modules/mime/src/test.js b/node_modules/mime/src/test.js new file mode 100644 index 00000000..42958a20 --- /dev/null +++ b/node_modules/mime/src/test.js @@ -0,0 +1,60 @@ +/** + * Usage: node test.js + */ + +var mime = require('../mime'); +var assert = require('assert'); +var path = require('path'); + +// +// Test mime lookups +// + +assert.equal('text/plain', mime.lookup('text.txt')); // normal file +assert.equal('text/plain', mime.lookup('TEXT.TXT')); // uppercase +assert.equal('text/plain', mime.lookup('dir/text.txt')); // dir + file +assert.equal('text/plain', mime.lookup('.text.txt')); // hidden file +assert.equal('text/plain', mime.lookup('.txt')); // nameless +assert.equal('text/plain', mime.lookup('txt')); // extension-only +assert.equal('text/plain', mime.lookup('/txt')); // extension-less () +assert.equal('text/plain', mime.lookup('\\txt')); // Windows, extension-less +assert.equal('application/octet-stream', mime.lookup('text.nope')); // unrecognized +assert.equal('fallback', mime.lookup('text.fallback', 'fallback')); // alternate default + +// +// Test extensions +// + +assert.equal('txt', mime.extension(mime.types.text)); +assert.equal('html', mime.extension(mime.types.htm)); +assert.equal('bin', mime.extension('application/octet-stream')); +assert.equal('bin', mime.extension('application/octet-stream ')); +assert.equal('html', mime.extension(' text/html; charset=UTF-8')); +assert.equal('html', mime.extension('text/html; charset=UTF-8 ')); +assert.equal('html', mime.extension('text/html; charset=UTF-8')); +assert.equal('html', mime.extension('text/html ; charset=UTF-8')); +assert.equal('html', mime.extension('text/html;charset=UTF-8')); +assert.equal('html', mime.extension('text/Html;charset=UTF-8')); +assert.equal(undefined, mime.extension('unrecognized')); + +// +// Test node.types lookups +// + +assert.equal('font/woff', mime.lookup('file.woff')); +assert.equal('application/octet-stream', mime.lookup('file.buffer')); +// TODO: Uncomment once #157 is resolved +// assert.equal('audio/mp4', mime.lookup('file.m4a')); +assert.equal('font/otf', mime.lookup('file.otf')); + +// +// Test charsets +// + +assert.equal('UTF-8', mime.charsets.lookup('text/plain')); +assert.equal('UTF-8', mime.charsets.lookup(mime.types.js)); +assert.equal('UTF-8', mime.charsets.lookup(mime.types.json)); +assert.equal(undefined, mime.charsets.lookup(mime.types.bin)); +assert.equal('fallback', mime.charsets.lookup('application/octet-stream', 'fallback')); + +console.log('\nAll tests passed'); diff --git a/node_modules/mime/types.json b/node_modules/mime/types.json new file mode 100644 index 00000000..bec78abd --- /dev/null +++ b/node_modules/mime/types.json @@ -0,0 +1 @@ 
+{"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomsvc+xml":["atomsvc"],"application/bdoc":["bdoc"],"application/ccxml+xml":["ccxml"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["ecma"],"application/emma+xml":["emma"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/font-tdpfr":["pfr"],"application/font-woff":[],"application/font-woff2":[],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"application/prs.cww":["cww"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"applica
tion/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["df
ac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"
application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-prese
ts":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.st
ardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":[],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":[],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["
udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":[],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":[],"application/x-msdownload":["com","bat"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["wmf","emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":[],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["xlf"],"application/x-xpins
tall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"application/xaml+xml":["xaml"],"application/xcap-diff+xml":["xdf"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":[],"audio/adpcm":["adp"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mp3":[],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/wav":["wav"],"audio/wave":[],"audio/webm":["weba"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":[],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":[],"audio/x-wav":[],"audio/xm":["xm"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/apng":["apng"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/g3fax":["g3"],"image/gif":["gif"],"image/ief":["ief"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","jpe"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/ktx":["ktx"],"image/png":["png"],"image/prs.btif":["btif"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/tiff":["tiff","tif"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":[],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/webp":["webp"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":[],"image/x-pcx":["pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/rfc822":["eml","mime"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gd
l":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.vtu":["vtu"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["x3db","x3dbz"],"model/x3d+vrml":["x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/hjson":["hjson"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/prs.lines.tag":["dsc"],"text/richtext":["rtx"],"text/rtf":[],"text/sgml":["sgml","sgm"],"text/slim":["slim","slm"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/vtt":["vtt"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":[],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"text/xml":[],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/jpeg":["jpgv"],"video/jpm":["jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/webm":["webm"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]} \ No newline at end of file diff --git a/node_modules/minimatch/LICENSE b/node_modules/minimatch/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/minimatch/README.md b/node_modules/minimatch/README.md new file mode 100644 index 00000000..ad72b813 --- /dev/null +++ b/node_modules/minimatch/README.md @@ -0,0 +1,209 @@ +# minimatch + +A minimal matching utility. + +[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.svg)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. + +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +* `pattern` The original pattern the minimatch object represents. +* `options` The options supplied to the constructor. +* `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +* `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +* `negate` True if the pattern is negated. +* `comment` True if the pattern is a comment. +* `empty` True if the pattern is `""`. + +### Methods + +* `makeRe` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +* `match(fname)` Return true if the filename matches the pattern, or + false otherwise. +* `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. + +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. 
+ +```javascript +var isJS = minimatch(file, "*.js", { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) +``` + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. + +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. + +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if it contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. + +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(Ie, true on a hit, false on a miss.) + + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. 
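
To make the preceding rules concrete, here is a small sketch (file names and patterns are made up for illustration; the results noted in the comments follow the behavior described above):

```javascript
var minimatch = require("minimatch")

// A leading "!" negates the pattern unless `nonegate` is set.
minimatch("bar.foo", "!*.foo")                             // false: "bar.foo" matches "*.foo", so negation rejects it
minimatch("!notes.txt", "!notes.txt", { nonegate: true })  // true: the "!" is matched literally

// A leading "#" makes the pattern a comment (matches nothing) unless `nocomment` is set.
minimatch("#readme", "#readme")                            // false
minimatch("#readme", "#readme", { nocomment: true })       // true

// "**" is only special as a whole path part.
minimatch("a/x/y/b", "a/**/b")                             // true
minimatch("a/xb", "a/**b")                                 // false

// `nonull` returns the pattern itself when nothing matches.
minimatch.match([], "*.md", { nonull: true })              // ["*.md"]
minimatch.match([], "*.md")                                // []
```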
+ +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. diff --git a/node_modules/minimatch/minimatch.js b/node_modules/minimatch/minimatch.js new file mode 100644 index 00000000..5b5f8cf4 --- /dev/null +++ b/node_modules/minimatch/minimatch.js @@ -0,0 +1,923 @@ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = { sep: '/' } +try { + path = require('path') +} catch (er) {} + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + a = a || {} + b = b || {} + var t = {} + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return minimatch + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig.minimatch(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return Minimatch + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + + // shortcut: comments match nothing. 
+ if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return + + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = console.error + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + if (typeof pattern === 'undefined') { + throw new TypeError('undefined pattern') + } + + if (options.nobrace || + !pattern.match(/\{.*\}/)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +// parse a component of the expanded set. 
+// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + if (pattern.length > 1024 * 64) { + throw new TypeError('pattern is too long') + } + + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === '**') return GLOBSTAR + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + case '/': + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? 
'(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + if (inClass) { + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. 
As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '.': + case '[': + case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. 
+ var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = match +function match (f, partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. 
+ // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') + return emptyFileEnd + } + + // should be unreachable. 
+ throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json new file mode 100644 index 00000000..c4514c80 --- /dev/null +++ b/node_modules/minimatch/package.json @@ -0,0 +1,30 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "3.0.4", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap test/*.js --cov", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "devDependencies": { + "tap": "^10.3.2" + }, + "license": "ISC", + "files": [ + "minimatch.js" + ] +} diff --git a/node_modules/minimist/.travis.yml b/node_modules/minimist/.travis.yml new file mode 100644 index 00000000..cc4dba29 --- /dev/null +++ b/node_modules/minimist/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/node_modules/minimist/LICENSE b/node_modules/minimist/LICENSE new file mode 100644 index 00000000..ee27ba4b --- /dev/null +++ b/node_modules/minimist/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
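For anyone reviewing the vendored minimatch 3.0.4 source above (Minimatch, make/parse, makeRe, match/matchOne): a small usage sketch of that public API. This snippet is illustrative only, is not part of any file added by this patch, and the sample paths and patterns are made up.

```js
// Illustrative only -- exercises the minimatch API vendored by this patch.
var minimatch = require('minimatch')
var Minimatch = minimatch.Minimatch

// Function form: the pattern is brace-expanded, split on '/', and compiled
// per path segment before matching.
console.log(minimatch('src/lib/util.js', 'src/**/*.js'))   // true ("**" is GLOBSTAR)
console.log(minimatch('.env', '*'))                        // false (dotfiles need { dot: true })
console.log(minimatch('.env', '*', { dot: true }))         // true

// A leading '!' negates the whole pattern (see parseNegate above).
console.log(minimatch('readme.md', '!*.js'))               // true

// Reusable instance; makeRe() exposes the combined regular expression.
var mm = new Minimatch('a/**/c')
console.log(mm.match('a/b/x/c'))                           // true
console.log(mm.makeRe())                                   // /^(?:...)$/ built from this.set
```

Options such as `nocase`, `noglobstar`, `dot`, and `matchBase` map directly onto the branches visible in `parse`, `makeRe`, and `match` above.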
diff --git a/node_modules/minimist/example/parse.js b/node_modules/minimist/example/parse.js new file mode 100644 index 00000000..abff3e8e --- /dev/null +++ b/node_modules/minimist/example/parse.js @@ -0,0 +1,2 @@ +var argv = require('../')(process.argv.slice(2)); +console.dir(argv); diff --git a/node_modules/minimist/index.js b/node_modules/minimist/index.js new file mode 100644 index 00000000..584f551a --- /dev/null +++ b/node_modules/minimist/index.js @@ -0,0 +1,187 @@ +module.exports = function (args, opts) { + if (!opts) opts = {}; + + var flags = { bools : {}, strings : {} }; + + [].concat(opts['boolean']).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + }); + + [].concat(opts.string).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + }); + + var aliases = {}; + Object.keys(opts.alias || {}).forEach(function (key) { + aliases[key] = [].concat(opts.alias[key]); + aliases[key].forEach(function (x) { + aliases[x] = [key].concat(aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + + var defaults = opts['default'] || {}; + + var argv = { _ : [] }; + Object.keys(flags.bools).forEach(function (key) { + setArg(key, defaults[key] === undefined ? false : defaults[key]); + }); + + var notFlags = []; + + if (args.indexOf('--') !== -1) { + notFlags = args.slice(args.indexOf('--')+1); + args = args.slice(0, args.indexOf('--')); + } + + function setArg (key, val) { + var value = !flags.strings[key] && isNumber(val) + ? Number(val) : val + ; + setKey(argv, key.split('.'), value); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), value); + }); + } + + for (var i = 0; i < args.length; i++) { + var arg = args[i]; + + if (/^--.+=/.test(arg)) { + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. See: + // http://stackoverflow.com/a/1068308/13216 + var m = arg.match(/^--([^=]+)=([\s\S]*)$/); + setArg(m[1], m[2]); + } + else if (/^--no-.+/.test(arg)) { + var key = arg.match(/^--no-(.+)/)[1]; + setArg(key, false); + } + else if (/^--.+/.test(arg)) { + var key = arg.match(/^--(.+)/)[1]; + var next = args[i + 1]; + if (next !== undefined && !/^-/.test(next) + && !flags.bools[key] + && (aliases[key] ? !flags.bools[aliases[key]] : true)) { + setArg(key, next); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next === 'true'); + i++; + } + else { + setArg(key, flags.strings[key] ? '' : true); + } + } + else if (/^-[^-]+/.test(arg)) { + var letters = arg.slice(1,-1).split(''); + + var broken = false; + for (var j = 0; j < letters.length; j++) { + var next = arg.slice(j+2); + + if (next === '-') { + setArg(letters[j], next) + continue; + } + + if (/[A-Za-z]/.test(letters[j]) + && /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) { + setArg(letters[j], next); + broken = true; + break; + } + + if (letters[j+1] && letters[j+1].match(/\W/)) { + setArg(letters[j], arg.slice(j+2)); + broken = true; + break; + } + else { + setArg(letters[j], flags.strings[letters[j]] ? '' : true); + } + } + + var key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1]) + && !flags.bools[key] + && (aliases[key] ? !flags.bools[aliases[key]] : true)) { + setArg(key, args[i+1]); + i++; + } + else if (args[i+1] && /true|false/.test(args[i+1])) { + setArg(key, args[i+1] === 'true'); + i++; + } + else { + setArg(key, flags.strings[key] ? '' : true); + } + } + } + else { + argv._.push( + flags.strings['_'] || !isNumber(arg) ? 
arg : Number(arg) + ); + } + } + + Object.keys(defaults).forEach(function (key) { + if (!hasKey(argv, key.split('.'))) { + setKey(argv, key.split('.'), defaults[key]); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), defaults[key]); + }); + } + }); + + notFlags.forEach(function(key) { + argv._.push(key); + }); + + return argv; +}; + +function hasKey (obj, keys) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + o = (o[key] || {}); + }); + + var key = keys[keys.length - 1]; + return key in o; +} + +function setKey (obj, keys, value) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + if (o[key] === undefined) o[key] = {}; + o = o[key]; + }); + + var key = keys[keys.length - 1]; + if (o[key] === undefined || typeof o[key] === 'boolean') { + o[key] = value; + } + else if (Array.isArray(o[key])) { + o[key].push(value); + } + else { + o[key] = [ o[key], value ]; + } +} + +function isNumber (x) { + if (typeof x === 'number') return true; + if (/^0x[0-9a-f]+$/i.test(x)) return true; + return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); +} + +function longest (xs) { + return Math.max.apply(null, xs.map(function (x) { return x.length })); +} diff --git a/node_modules/minimist/package.json b/node_modules/minimist/package.json new file mode 100644 index 00000000..af6250bd --- /dev/null +++ b/node_modules/minimist/package.json @@ -0,0 +1,40 @@ +{ + "name": "minimist", + "version": "0.0.8", + "description": "parse argument options", + "main": "index.js", + "devDependencies": { + "tape": "~1.0.4", + "tap": "~0.4.0" + }, + "scripts": { + "test": "tap test/*.js" + }, + "testling" : { + "files" : "test/*.js", + "browsers" : [ + "ie/6..latest", + "ff/5", "firefox/latest", + "chrome/10", "chrome/latest", + "safari/5.1", "safari/latest", + "opera/12" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/substack/minimist.git" + }, + "homepage": "https://github.com/substack/minimist", + "keywords": [ + "argv", + "getopt", + "parser", + "optimist" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "license": "MIT" +} diff --git a/node_modules/minimist/readme.markdown b/node_modules/minimist/readme.markdown new file mode 100644 index 00000000..c2563532 --- /dev/null +++ b/node_modules/minimist/readme.markdown @@ -0,0 +1,73 @@ +# minimist + +parse argument options + +This module is the guts of optimist's argument parser without all the +fanciful decoration. + +[![browser support](https://ci.testling.com/substack/minimist.png)](http://ci.testling.com/substack/minimist) + +[![build status](https://secure.travis-ci.org/substack/minimist.png)](http://travis-ci.org/substack/minimist) + +# example + +``` js +var argv = require('minimist')(process.argv.slice(2)); +console.dir(argv); +``` + +``` +$ node example/parse.js -a beep -b boop +{ _: [], a: 'beep', b: 'boop' } +``` + +``` +$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz +{ _: [ 'foo', 'bar', 'baz' ], + x: 3, + y: 4, + n: 5, + a: true, + b: true, + c: true, + beep: 'boop' } +``` + +# methods + +``` js +var parseArgs = require('minimist') +``` + +## var argv = parseArgs(args, opts={}) + +Return an argument object `argv` populated with the array arguments from `args`. + +`argv._` contains all the arguments that didn't have an option associated with +them. + +Numeric-looking arguments will be returned as numbers unless `opts.string` or +`opts.boolean` is set for that argument name. 
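A quick illustration of the coercion rule just described, as an aside for reviewers of this vendored readme; the snippet and flag names below are made up and are not part of the upstream minimist documentation.

```js
var parse = require('minimist')

parse(['-p', '555'])                    // { _: [], p: 555 }    -> coerced to a number
parse(['-p', '555'], { string: 'p' })   // { _: [], p: '555' }  -> kept as a string
parse(['--on'], { boolean: 'on' })      // { _: [], on: true }  -> treated as a boolean flag
```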
+ +Any arguments after `'--'` will not be parsed and will end up in `argv._`. + +options can be: + +* `opts.string` - a string or array of strings argument names to always treat as +strings +* `opts.boolean` - a string or array of strings to always treat as booleans +* `opts.alias` - an object mapping string names to strings or arrays of string +argument names to use as aliases +* `opts.default` - an object mapping string argument names to default values + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install minimist +``` + +# license + +MIT diff --git a/node_modules/minimist/test/dash.js b/node_modules/minimist/test/dash.js new file mode 100644 index 00000000..8b034b99 --- /dev/null +++ b/node_modules/minimist/test/dash.js @@ -0,0 +1,24 @@ +var parse = require('../'); +var test = require('tape'); + +test('-', function (t) { + t.plan(5); + t.deepEqual(parse([ '-n', '-' ]), { n: '-', _: [] }); + t.deepEqual(parse([ '-' ]), { _: [ '-' ] }); + t.deepEqual(parse([ '-f-' ]), { f: '-', _: [] }); + t.deepEqual( + parse([ '-b', '-' ], { boolean: 'b' }), + { b: true, _: [ '-' ] } + ); + t.deepEqual( + parse([ '-s', '-' ], { string: 's' }), + { s: '-', _: [] } + ); +}); + +test('-a -- b', function (t) { + t.plan(3); + t.deepEqual(parse([ '-a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); +}); diff --git a/node_modules/minimist/test/default_bool.js b/node_modules/minimist/test/default_bool.js new file mode 100644 index 00000000..f0041ee4 --- /dev/null +++ b/node_modules/minimist/test/default_bool.js @@ -0,0 +1,20 @@ +var test = require('tape'); +var parse = require('../'); + +test('boolean default true', function (t) { + var argv = parse([], { + boolean: 'sometrue', + default: { sometrue: true } + }); + t.equal(argv.sometrue, true); + t.end(); +}); + +test('boolean default false', function (t) { + var argv = parse([], { + boolean: 'somefalse', + default: { somefalse: false } + }); + t.equal(argv.somefalse, false); + t.end(); +}); diff --git a/node_modules/minimist/test/dotted.js b/node_modules/minimist/test/dotted.js new file mode 100644 index 00000000..ef0ae349 --- /dev/null +++ b/node_modules/minimist/test/dotted.js @@ -0,0 +1,16 @@ +var parse = require('../'); +var test = require('tape'); + +test('dotted alias', function (t) { + var argv = parse(['--a.b', '22'], {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 22); + t.equal(argv.aa.bb, 22); + t.end(); +}); + +test('dotted default', function (t) { + var argv = parse('', {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 11); + t.equal(argv.aa.bb, 11); + t.end(); +}); diff --git a/node_modules/minimist/test/long.js b/node_modules/minimist/test/long.js new file mode 100644 index 00000000..5d3a1e09 --- /dev/null +++ b/node_modules/minimist/test/long.js @@ -0,0 +1,31 @@ +var test = require('tape'); +var parse = require('../'); + +test('long opts', function (t) { + t.deepEqual( + parse([ '--bool' ]), + { bool : true, _ : [] }, + 'long boolean' + ); + t.deepEqual( + parse([ '--pow', 'xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture sp' + ); + t.deepEqual( + parse([ '--pow=xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture eq' + ); + t.deepEqual( + parse([ '--host', 'localhost', '--port', '555' ]), + { host : 'localhost', port : 555, _ : [] }, + 'long captures sp' + ); + t.deepEqual( + parse([ '--host=localhost', '--port=555' ]), + { host : 'localhost', 
port : 555, _ : [] }, + 'long captures eq' + ); + t.end(); +}); diff --git a/node_modules/minimist/test/parse.js b/node_modules/minimist/test/parse.js new file mode 100644 index 00000000..8a906466 --- /dev/null +++ b/node_modules/minimist/test/parse.js @@ -0,0 +1,318 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse args', function (t) { + t.deepEqual( + parse([ '--no-moo' ]), + { moo : false, _ : [] }, + 'no' + ); + t.deepEqual( + parse([ '-v', 'a', '-v', 'b', '-v', 'c' ]), + { v : ['a','b','c'], _ : [] }, + 'multi' + ); + t.end(); +}); + +test('comprehensive', function (t) { + t.deepEqual( + parse([ + '--name=meowmers', 'bare', '-cats', 'woo', + '-h', 'awesome', '--multi=quux', + '--key', 'value', + '-b', '--bool', '--no-meep', '--multi=baz', + '--', '--not-a-flag', 'eek' + ]), + { + c : true, + a : true, + t : true, + s : 'woo', + h : 'awesome', + b : true, + bool : true, + key : 'value', + multi : [ 'quux', 'baz' ], + meep : false, + name : 'meowmers', + _ : [ 'bare', '--not-a-flag', 'eek' ] + } + ); + t.end(); +}); + +test('nums', function (t) { + var argv = parse([ + '-x', '1234', + '-y', '5.67', + '-z', '1e7', + '-w', '10f', + '--hex', '0xdeadbeef', + '789' + ]); + t.deepEqual(argv, { + x : 1234, + y : 5.67, + z : 1e7, + w : '10f', + hex : 0xdeadbeef, + _ : [ 789 ] + }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv.y, 'number'); + t.deepEqual(typeof argv.z, 'number'); + t.deepEqual(typeof argv.w, 'string'); + t.deepEqual(typeof argv.hex, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); + +test('flag boolean', function (t) { + var argv = parse([ '-t', 'moo' ], { boolean: 't' }); + t.deepEqual(argv, { t : true, _ : [ 'moo' ] }); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('flag boolean value', function (t) { + var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], { + boolean: [ 't', 'verbose' ], + default: { verbose: true } + }); + + t.deepEqual(argv, { + verbose: false, + t: true, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('flag boolean default false', function (t) { + var argv = parse(['moo'], { + boolean: ['t', 'verbose'], + default: { verbose: false, t: false } + }); + + t.deepEqual(argv, { + verbose: false, + t: false, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); + +}); + +test('boolean groups', function (t) { + var argv = parse([ '-x', '-z', 'one', 'two', 'three' ], { + boolean: ['x','y','z'] + }); + + t.deepEqual(argv, { + x : true, + y : false, + z : true, + _ : [ 'one', 'two', 'three' ] + }); + + t.deepEqual(typeof argv.x, 'boolean'); + t.deepEqual(typeof argv.y, 'boolean'); + t.deepEqual(typeof argv.z, 'boolean'); + t.end(); +}); + +test('newlines in params' , function (t) { + var args = parse([ '-s', "X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + + // reproduce in bash: + // VALUE="new + // line" + // node program.js --s="$VALUE" + args = parse([ "--s=X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + t.end(); +}); + +test('strings' , function (t) { + var s = parse([ '-s', '0001234' ], { string: 's' }).s; + t.equal(s, '0001234'); + t.equal(typeof s, 'string'); + + var x = parse([ '-x', '56' ], { string: 'x' }).x; + t.equal(x, '56'); + t.equal(typeof x, 'string'); + t.end(); +}); + +test('stringArgs', function (t) { + var s = parse([ ' ', ' ' ], { string: '_' })._; + t.same(s.length, 2); + t.same(typeof 
s[0], 'string'); + t.same(s[0], ' '); + t.same(typeof s[1], 'string'); + t.same(s[1], ' '); + t.end(); +}); + +test('empty strings', function(t) { + var s = parse([ '-s' ], { string: 's' }).s; + t.equal(s, ''); + t.equal(typeof s, 'string'); + + var str = parse([ '--str' ], { string: 'str' }).str; + t.equal(str, ''); + t.equal(typeof str, 'string'); + + var letters = parse([ '-art' ], { + string: [ 'a', 't' ] + }); + + t.equal(letters.a, ''); + t.equal(letters.r, true); + t.equal(letters.t, ''); + + t.end(); +}); + + +test('slashBreak', function (t) { + t.same( + parse([ '-I/foo/bar/baz' ]), + { I : '/foo/bar/baz', _ : [] } + ); + t.same( + parse([ '-xyz/foo/bar/baz' ]), + { x : true, y : true, z : '/foo/bar/baz', _ : [] } + ); + t.end(); +}); + +test('alias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: 'zoom' } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.f, 11); + t.end(); +}); + +test('multiAlias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: [ 'zm', 'zoom' ] } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.z, argv.zm); + t.equal(argv.f, 11); + t.end(); +}); + +test('nested dotted objects', function (t) { + var argv = parse([ + '--foo.bar', '3', '--foo.baz', '4', + '--foo.quux.quibble', '5', '--foo.quux.o_O', + '--beep.boop' + ]); + + t.same(argv.foo, { + bar : 3, + baz : 4, + quux : { + quibble : 5, + o_O : true + } + }); + t.same(argv.beep, { boop : true }); + t.end(); +}); + +test('boolean and alias with chainable api', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + herp: { alias: 'h', boolean: true } + }; + var aliasedArgv = parse(aliased, { + boolean: 'herp', + alias: { h: 'herp' } + }); + var propertyArgv = parse(regular, { + boolean: 'herp', + alias: { h: 'herp' } + }); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias with options hash', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + alias: { 'h': 'herp' }, + boolean: 'herp' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias using explicit true', function (t) { + var aliased = [ '-h', 'true' ]; + var regular = [ '--herp', 'true' ]; + var opts = { + alias: { h: 'herp' }, + boolean: 'h' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +// regression, see https://github.com/substack/node-optimist/issues/71 +test('boolean and --x=true', function(t) { + var parsed = parse(['--boool', '--other=true'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'true'); + + parsed = parse(['--boool', '--other=false'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'false'); + t.end(); +}); diff --git a/node_modules/minimist/test/parse_modified.js b/node_modules/minimist/test/parse_modified.js new file mode 100644 index 00000000..21851b03 --- /dev/null +++ b/node_modules/minimist/test/parse_modified.js @@ -0,0 
+1,9 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse with modifier functions' , function (t) { + t.plan(1); + + var argv = parse([ '-b', '123' ], { boolean: 'b' }); + t.deepEqual(argv, { b: true, _: ['123'] }); +}); diff --git a/node_modules/minimist/test/short.js b/node_modules/minimist/test/short.js new file mode 100644 index 00000000..d513a1c2 --- /dev/null +++ b/node_modules/minimist/test/short.js @@ -0,0 +1,67 @@ +var parse = require('../'); +var test = require('tape'); + +test('numeric short args', function (t) { + t.plan(2); + t.deepEqual(parse([ '-n123' ]), { n: 123, _: [] }); + t.deepEqual( + parse([ '-123', '456' ]), + { 1: true, 2: true, 3: 456, _: [] } + ); +}); + +test('short', function (t) { + t.deepEqual( + parse([ '-b' ]), + { b : true, _ : [] }, + 'short boolean' + ); + t.deepEqual( + parse([ 'foo', 'bar', 'baz' ]), + { _ : [ 'foo', 'bar', 'baz' ] }, + 'bare' + ); + t.deepEqual( + parse([ '-cats' ]), + { c : true, a : true, t : true, s : true, _ : [] }, + 'group' + ); + t.deepEqual( + parse([ '-cats', 'meow' ]), + { c : true, a : true, t : true, s : 'meow', _ : [] }, + 'short group next' + ); + t.deepEqual( + parse([ '-h', 'localhost' ]), + { h : 'localhost', _ : [] }, + 'short capture' + ); + t.deepEqual( + parse([ '-h', 'localhost', '-p', '555' ]), + { h : 'localhost', p : 555, _ : [] }, + 'short captures' + ); + t.end(); +}); + +test('mixed short bool and capture', function (t) { + t.same( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); + +test('short and long', function (t) { + t.deepEqual( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); diff --git a/node_modules/minimist/test/whitespace.js b/node_modules/minimist/test/whitespace.js new file mode 100644 index 00000000..8a52a58c --- /dev/null +++ b/node_modules/minimist/test/whitespace.js @@ -0,0 +1,8 @@ +var parse = require('../'); +var test = require('tape'); + +test('whitespace should be whitespace' , function (t) { + t.plan(1); + var x = parse([ '-x', '\t' ]).x; + t.equal(x, '\t'); +}); diff --git a/node_modules/minipass/LICENSE b/node_modules/minipass/LICENSE new file mode 100644 index 00000000..20a47625 --- /dev/null +++ b/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/minipass/README.md b/node_modules/minipass/README.md new file mode 100644 index 00000000..7a83c59f --- /dev/null +++ b/node_modules/minipass/README.md @@ -0,0 +1,124 @@ +# minipass + +A _very_ minimal implementation of a [PassThrough +stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough) + +[It's very +fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0) +for objects, strings, and buffers. + +Supports pipe()ing (including multi-pipe() and backpressure +transmission), buffering data until either a `data` event handler or +`pipe()` is added (so you don't lose the first chunk), and most other +cases where PassThrough is a good idea. + +There is a `read()` method, but it's much more efficient to consume +data from this stream via `'data'` events or by calling `pipe()` into +some other stream. Calling `read()` requires the buffer to be +flattened in some cases, which requires copying memory. + +There is also no `unpipe()` method. Once you start piping, there is +no stopping it! + +If you set `objectMode: true` in the options, then whatever is written +will be emitted. Otherwise, it'll do a minimal amount of Buffer +copying to ensure proper Streams semantics when `read(n)` is called. + +This is not a `through` or `through2` stream. It doesn't transform +the data, it just passes it right through. If you want to transform +the data, extend the class, and override the `write()` method. Once +you're done transforming the data however you want, call +`super.write()` with the transform output. + +For an example of a stream that extends MiniPass to provide transform +capabilities, check out [minizlib](http://npm.im/minizlib). + +## USAGE + +```js +const MiniPass = require('minipass') +const mp = new MiniPass(options) // optional: { encoding } +mp.write('foo') +mp.pipe(someOtherStream) +mp.end('bar') +``` + +### collecting + +```js +mp.collect().then(all => { + // all is an array of all the data emitted + // encoding is supported in this case, so + // so the result will be a collection of strings if + // an encoding is specified, or buffers/objects if not. + // + // In an async function, you may do + // const data = await stream.collect() +}) +``` + +### iteration + +You can iterate over streams synchronously or asynchronously in +platforms that support it. + +Synchronous iteration will end when the currently available data is +consumed, even if the `end` event has not been reached. In string and +buffer mode, the data is concatenated, so unless multiple writes are +occurring in the same tick as the `read()`, sync iteration loops will +generally only have a single iteration. + +To consume chunks in this way exactly as they have been written, with +no flattening, create the stream with the `{ objectMode: true }` +option. + +```js +const mp = new Minipass({ objectMode: true }) +mp.write('a') +mp.write('b') +for (let letter of mp) { + console.log(letter) // a, b +} +mp.write('c') +mp.write('d') +for (let letter of mp) { + console.log(letter) // c, d +} +mp.write('e') +mp.end() +for (let letter of mp) { + console.log(letter) // e +} +for (let letter of mp) { + console.log(letter) // nothing +} +``` + +Asynchronous iteration will continue until the end event is reached, +consuming all of the data. 
+ +```js +const mp = new Minipass({ encoding: 'utf8' }) + +// some source of some data +let i = 5 +const inter = setInterval(() => { + if (i --> 0) + mp.write(Buffer.from('foo\n', 'utf8')) + else { + mp.end() + clearInterval(inter) + } +}, 100) + +// consume the data with asynchronous iteration +async function consume () { + for await (let chunk of mp) { + console.log(chunk) + } + return 'ok' +} + +consume().then(res => console.log(res)) +// logs `foo\n` 5 times, and then `ok` +``` diff --git a/node_modules/minipass/index.js b/node_modules/minipass/index.js new file mode 100644 index 00000000..de472c36 --- /dev/null +++ b/node_modules/minipass/index.js @@ -0,0 +1,375 @@ +'use strict' +const EE = require('events') +const Yallist = require('yallist') +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const doIter = process.env._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = doIter && Symbol.asyncIterator || Symbol('asyncIterator not implemented') +const ITERATOR = doIter && Symbol.iterator || Symbol('iterator not implemented') +const FLUSHCHUNK = Symbol('flushChunk') +const SD = require('string_decoder').StringDecoder +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const RESUME = Symbol('resume') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') + +// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from +// or Buffer.alloc, and Buffer in node 10 deprecated the ctor. +// .M, this is fine .\^/M.. +let B = Buffer +/* istanbul ignore next */ +if (!B.alloc) { + B = require('safe-buffer').Buffer +} + +module.exports = class MiniPass extends EE { + constructor (options) { + super() + this[FLOWING] = false + this.pipes = new Yallist() + this.buffer = new Yallist() + this[OBJECTMODE] = options && options.objectMode || false + if (this[OBJECTMODE]) + this[ENCODING] = null + else + this[ENCODING] = options && options.encoding || null + if (this[ENCODING] === 'buffer') + this[ENCODING] = null + this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[CLOSED] = false + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + } + + get bufferLength () { return this[BUFFERLENGTH] } + + get encoding () { return this[ENCODING] } + set encoding (enc) { + if (this[OBJECTMODE]) + throw new Error('cannot set encoding in objectMode') + + if (this[ENCODING] && enc !== this[ENCODING] && + (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? 
new SD(enc) : null + if (this.buffer.length) + this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding (enc) { + this.encoding = enc + } + + write (chunk, encoding, cb) { + if (this[EOF]) + throw new Error('write after end') + + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + + if (!encoding) + encoding = 'utf8' + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && !this[OBJECTMODE] && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { + chunk = B.from(chunk, encoding) + } + + if (B.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + try { + return this.flowing + ? (this.emit('data', chunk), this.flowing) + : (this[BUFFERPUSH](chunk), false) + } finally { + this.emit('readable') + if (cb) + cb() + } + } + + read (n) { + try { + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) + return null + + if (this[OBJECTMODE]) + n = null + + if (this.buffer.length > 1 && !this[OBJECTMODE]) { + if (this.encoding) + this.buffer = new Yallist([ + Array.from(this.buffer).join('') + ]) + else + this.buffer = new Yallist([ + B.concat(Array.from(this.buffer), this[BUFFERLENGTH]) + ]) + } + + return this[READ](n || null, this.buffer.head.value) + } finally { + this[MAYBE_EMIT_END]() + } + } + + [READ] (n, chunk) { + if (n === chunk.length || n === null) + this[BUFFERSHIFT]() + else { + this.buffer.head.value = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this.buffer.length && !this[EOF]) + this.emit('drain') + + return chunk + } + + end (chunk, encoding, cb) { + if (typeof chunk === 'function') + cb = chunk, chunk = null + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + if (chunk) + this.write(chunk, encoding) + if (cb) + this.once('end', cb) + this[EOF] = true + this.writable = false + if (this.flowing) + this[MAYBE_EMIT_END]() + } + + // don't let the internal resume be overwritten + [RESUME] () { + this[FLOWING] = true + this.emit('resume') + if (this.buffer.length) + this[FLUSH]() + else if (this[EOF]) + this[MAYBE_EMIT_END]() + else + this.emit('drain') + } + + resume () { + return this[RESUME]() + } + + pause () { + this[FLOWING] = false + } + + get flowing () { + return this[FLOWING] + } + + [BUFFERPUSH] (chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1 + else + this[BUFFERLENGTH] += chunk.length + return this.buffer.push(chunk) + } + + [BUFFERSHIFT] () { + if (this.buffer.length) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1 + else + this[BUFFERLENGTH] -= this.buffer.head.value.length + } + return this.buffer.shift() + } + + [FLUSH] () { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) + + if (!this.buffer.length && !this[EOF]) + this.emit('drain') + } + + [FLUSHCHUNK] (chunk) { + return chunk ? 
(this.emit('data', chunk), this.flowing) : false + } + + pipe (dest, opts) { + if (dest === process.stdout || dest === process.stderr) + (opts = opts || {}).end = false + const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() } + this.pipes.push(p) + + dest.on('drain', p.ondrain) + this[RESUME]() + return dest + } + + addListener (ev, fn) { + return this.on(ev, fn) + } + + on (ev, fn) { + try { + return super.on(ev, fn) + } finally { + if (ev === 'data' && !this.pipes.length && !this.flowing) + this[RESUME]() + else if (ev === 'end' && this[EMITTED_END]) { + super.emit('end') + this.removeAllListeners('end') + } + } + } + + get emittedEnd () { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END] () { + if (!this[EMITTED_END] && this.buffer.length === 0 && this[EOF]) { + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) + this.emit('close') + } + } + + emit (ev, data) { + if (ev === 'data') { + if (!data) + return + + if (this.pipes.length) + this.pipes.forEach(p => p.dest.write(data) || this.pause()) + } else if (ev === 'end') { + if (this[EMITTED_END] === true) + return + + this[EMITTED_END] = true + this.readable = false + + if (this[DECODER]) { + data = this[DECODER].end() + if (data) { + this.pipes.forEach(p => p.dest.write(data)) + super.emit('data', data) + } + } + + this.pipes.forEach(p => { + p.dest.removeListener('drain', p.ondrain) + if (!p.opts || p.opts.end !== false) + p.dest.end() + }) + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END]) + return + } + + const args = new Array(arguments.length) + args[0] = ev + args[1] = data + if (arguments.length > 2) { + for (let i = 2; i < arguments.length; i++) { + args[i] = arguments[i] + } + } + + try { + return super.emit.apply(this, args) + } finally { + if (ev !== 'end') + this[MAYBE_EMIT_END]() + else + this.removeAllListeners('end') + } + } + + // const all = await stream.collect() + collect () { + return new Promise((resolve, reject) => { + const buf = [] + this.on('data', c => buf.push(c)) + this.on('end', () => resolve(buf)) + this.on('error', reject) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR] () { + const next = () => { + const res = this.read() + if (res !== null) + return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) + return Promise.resolve({ done: true }) + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + resolve({ done: true }) + } + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { next } + } + + // for (let chunk of stream) + [ITERATOR] () { + const next = () => { + const value = this.read() + const done = value === null + return { value, done } + } + return { next } + } +} diff --git a/node_modules/minipass/node_modules/safe-buffer/LICENSE b/node_modules/minipass/node_modules/safe-buffer/LICENSE new file mode 100644 index 00000000..0c068cee --- /dev/null +++ b/node_modules/minipass/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross 
Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/minipass/node_modules/safe-buffer/README.md b/node_modules/minipass/node_modules/safe-buffer/README.md new file mode 100644 index 00000000..356e3519 --- /dev/null +++ b/node_modules/minipass/node_modules/safe-buffer/README.md @@ -0,0 +1,586 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +[Get supported safe-buffer with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-safe-buffer?utm_source=npm-safe-buffer&utm_medium=referral&utm_campaign=readme) + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. 
+ +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. 
For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. + +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. 
+ +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. + +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. 
+ +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. 
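+
+As a purely illustrative sketch (not part of this package or of node.js core), the
+earlier `toHex` example can defend itself today simply by checking its argument
+before it ever reaches the `Buffer` constructor:
+
+```js
+// Illustrative only: refuse anything that is not a string, so a Number
+// can never reach the Buffer constructor and allocate raw memory.
+function toHex (str) {
+  if (typeof str !== 'string') {
+    throw new TypeError('toHex expects a string')
+  }
+  return new Buffer(str, 'utf8').toString('hex')
+}
+```
+
+This guards one call site, but it relies on every developer remembering to write the
+check; the API changes proposed below remove the need to rely on that.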
+ +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. 
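+
+As a concrete (though illustrative) sketch, the vulnerable hex service from earlier
+looks like this with `safe-buffer`. Because `Buffer.from` throws a `TypeError` when
+it is handed a number, a payload like `{"str": 1000}` now gets an error response
+instead of 1,000 bytes of process memory:
+
+```js
+var Buffer = require('safe-buffer').Buffer
+var http = require('http')
+
+var server = http.createServer(function (req, res) {
+  var data = ''
+  req.setEncoding('utf8')
+  req.on('data', function (chunk) {
+    data += chunk
+  })
+  req.on('end', function () {
+    try {
+      var body = JSON.parse(data)
+      // Buffer.from() rejects numbers, so this line can never hand back
+      // uninitialized memory to the client.
+      res.end(Buffer.from(body.str).toString('hex'))
+    } catch (err) {
+      res.statusCode = 400
+      res.end('invalid request body\n')
+    }
+  })
+})
+
+server.listen(8080)
+```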
+ + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/minipass/node_modules/safe-buffer/index.d.ts b/node_modules/minipass/node_modules/safe-buffer/index.d.ts new file mode 100644 index 00000000..e9fed809 --- /dev/null +++ b/node_modules/minipass/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + 
writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. 
+ */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. 
+ * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/minipass/node_modules/safe-buffer/index.js b/node_modules/minipass/node_modules/safe-buffer/index.js new file mode 100644 index 00000000..054c8d30 --- /dev/null +++ b/node_modules/minipass/node_modules/safe-buffer/index.js @@ -0,0 +1,64 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/minipass/node_modules/safe-buffer/package.json b/node_modules/minipass/node_modules/safe-buffer/package.json new file mode 100644 index 00000000..d532dafb --- /dev/null +++ b/node_modules/minipass/node_modules/safe-buffer/package.json @@ -0,0 +1,37 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.2.0", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/minipass/package.json b/node_modules/minipass/package.json new file mode 100644 index 00000000..29370421 --- /dev/null +++ b/node_modules/minipass/package.json @@ -0,0 +1,34 @@ +{ + "name": "minipass", + "version": "2.3.5", + "description": "minimal implementation of a PassThrough stream", + "main": "index.js", + "dependencies": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + }, + "devDependencies": { + "end-of-stream": "^1.4.0", + "tap": "^12.0.1", + "through2": "^2.0.3" + }, + "scripts": { + 
"test": "tap test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "index.js" + ] +} diff --git a/node_modules/minizlib/LICENSE b/node_modules/minizlib/LICENSE new file mode 100644 index 00000000..ffce7383 --- /dev/null +++ b/node_modules/minizlib/LICENSE @@ -0,0 +1,26 @@ +Minizlib was created by Isaac Z. Schlueter. +It is a derivative work of the Node.js project. + +""" +Copyright Isaac Z. Schlueter and Contributors +Copyright Node.js contributors. All rights reserved. +Copyright Joyent, Inc. and other Node contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +""" diff --git a/node_modules/minizlib/README.md b/node_modules/minizlib/README.md new file mode 100644 index 00000000..2b585545 --- /dev/null +++ b/node_modules/minizlib/README.md @@ -0,0 +1,44 @@ +# minizlib + +A tiny fast zlib stream built on [minipass](http://npm.im/minipass) +and Node.js's zlib binding. + +This module was created to serve the needs of +[node-tar](http://npm.im/tar) v2. If your needs are different, then +it may not be for you. + +## How does this differ from the streams in `require('zlib')`? + +First, there are no convenience methods to compress or decompress a +buffer. If you want those, use the built-in `zlib` module. This is +only streams. + +This module compresses and decompresses the data as fast as you feed +it in. It is synchronous, and runs on the main process thread. Zlib +operations can be high CPU, but they're very fast, and doing it this +way means much less bookkeeping and artificial deferral. + +Node's built in zlib streams are built on top of `stream.Transform`. +They do the maximally safe thing with respect to consistent +asynchrony, buffering, and backpressure. + +This module _does_ support backpressure, and will buffer output chunks +that are not consumed, but is less of a mediator between the input and +output. There is no high or low watermarks, no state objects, and so +artificial async deferrals. It will not protect you from Zalgo. + +If you write, data will be emitted right away. If you write +everything synchronously in one tick, and you are listening to the +`data` event to consume it, then it'll all be emitted right away in +that same tick. 
If you want data to be emitted in the next tick, then +write it in the next tick. + +It is thus the responsibility of the reader and writer to manage their +own consumption and process execution flow. + +The goal is to compress and decompress as fast as possible, even for +files that are too large to store all in one buffer. + +The API is very similar to the built-in zlib module. There are +classes that you instantiate with `new` and they are streams that can +be piped together. diff --git a/node_modules/minizlib/constants.js b/node_modules/minizlib/constants.js new file mode 100644 index 00000000..4edffde8 --- /dev/null +++ b/node_modules/minizlib/constants.js @@ -0,0 +1,46 @@ +module.exports = Object.freeze({ + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + ZLIB_VERNUM: 4736, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1 +}) diff --git a/node_modules/minizlib/index.js b/node_modules/minizlib/index.js new file mode 100644 index 00000000..df486965 --- /dev/null +++ b/node_modules/minizlib/index.js @@ -0,0 +1,335 @@ +'use strict' + +const assert = require('assert') +const Buffer = require('buffer').Buffer +const realZlib = require('zlib') + +const constants = exports.constants = require('./constants.js') +const MiniPass = require('minipass') + +const OriginalBufferConcat = Buffer.concat + +class ZlibError extends Error { + constructor (msg, errno) { + super('zlib: ' + msg) + this.errno = errno + this.code = codes.get(errno) + } + + get name () { + return 'ZlibError' + } +} + +// translation table for return codes. +const codes = new Map([ + [constants.Z_OK, 'Z_OK'], + [constants.Z_STREAM_END, 'Z_STREAM_END'], + [constants.Z_NEED_DICT, 'Z_NEED_DICT'], + [constants.Z_ERRNO, 'Z_ERRNO'], + [constants.Z_STREAM_ERROR, 'Z_STREAM_ERROR'], + [constants.Z_DATA_ERROR, 'Z_DATA_ERROR'], + [constants.Z_MEM_ERROR, 'Z_MEM_ERROR'], + [constants.Z_BUF_ERROR, 'Z_BUF_ERROR'], + [constants.Z_VERSION_ERROR, 'Z_VERSION_ERROR'] +]) + +const validFlushFlags = new Set([ + constants.Z_NO_FLUSH, + constants.Z_PARTIAL_FLUSH, + constants.Z_SYNC_FLUSH, + constants.Z_FULL_FLUSH, + constants.Z_FINISH, + constants.Z_BLOCK +]) + +const strategies = new Set([ + constants.Z_FILTERED, + constants.Z_HUFFMAN_ONLY, + constants.Z_RLE, + constants.Z_FIXED, + constants.Z_DEFAULT_STRATEGY +]) + +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. 
+const _opts = Symbol('opts') +const _flushFlag = Symbol('flushFlag') +const _finishFlush = Symbol('finishFlush') +const _handle = Symbol('handle') +const _onError = Symbol('onError') +const _level = Symbol('level') +const _strategy = Symbol('strategy') +const _ended = Symbol('ended') + +class Zlib extends MiniPass { + constructor (opts, mode) { + super(opts) + this[_ended] = false + this[_opts] = opts = opts || {} + if (opts.flush && !validFlushFlags.has(opts.flush)) { + throw new TypeError('Invalid flush flag: ' + opts.flush) + } + if (opts.finishFlush && !validFlushFlags.has(opts.finishFlush)) { + throw new TypeError('Invalid flush flag: ' + opts.finishFlush) + } + + this[_flushFlag] = opts.flush || constants.Z_NO_FLUSH + this[_finishFlush] = typeof opts.finishFlush !== 'undefined' ? + opts.finishFlush : constants.Z_FINISH + + if (opts.chunkSize) { + if (opts.chunkSize < constants.Z_MIN_CHUNK) { + throw new RangeError('Invalid chunk size: ' + opts.chunkSize) + } + } + + if (opts.windowBits) { + if (opts.windowBits < constants.Z_MIN_WINDOWBITS || + opts.windowBits > constants.Z_MAX_WINDOWBITS) { + throw new RangeError('Invalid windowBits: ' + opts.windowBits) + } + } + + if (opts.level) { + if (opts.level < constants.Z_MIN_LEVEL || + opts.level > constants.Z_MAX_LEVEL) { + throw new RangeError('Invalid compression level: ' + opts.level) + } + } + + if (opts.memLevel) { + if (opts.memLevel < constants.Z_MIN_MEMLEVEL || + opts.memLevel > constants.Z_MAX_MEMLEVEL) { + throw new RangeError('Invalid memLevel: ' + opts.memLevel) + } + } + + if (opts.strategy && !(strategies.has(opts.strategy))) + throw new TypeError('Invalid strategy: ' + opts.strategy) + + if (opts.dictionary) { + if (!(opts.dictionary instanceof Buffer)) { + throw new TypeError('Invalid dictionary: it should be a Buffer instance') + } + } + + this[_handle] = new realZlib[mode](opts) + + this[_onError] = (err) => { + // there is no way to cleanly recover. + // continuing only obscures problems. + this.close() + + const error = new ZlibError(err.message, err.errno) + this.emit('error', error) + } + this[_handle].on('error', this[_onError]) + + const level = typeof opts.level === 'number' ? opts.level + : constants.Z_DEFAULT_COMPRESSION + + var strategy = typeof opts.strategy === 'number' ? opts.strategy + : constants.Z_DEFAULT_STRATEGY + + // API changed in node v9 + /* istanbul ignore next */ + + this[_level] = level + this[_strategy] = strategy + + this.once('end', this.close) + } + + close () { + if (this[_handle]) { + this[_handle].close() + this[_handle] = null + this.emit('close') + } + } + + params (level, strategy) { + if (!this[_handle]) + throw new Error('cannot switch params when binding is closed') + + // no way to test this without also not supporting params at all + /* istanbul ignore if */ + if (!this[_handle].params) + throw new Error('not supported in this implementation') + + if (level < constants.Z_MIN_LEVEL || + level > constants.Z_MAX_LEVEL) { + throw new RangeError('Invalid compression level: ' + level) + } + + if (!(strategies.has(strategy))) + throw new TypeError('Invalid strategy: ' + strategy) + + if (this[_level] !== level || this[_strategy] !== strategy) { + this.flush(constants.Z_SYNC_FLUSH) + assert(this[_handle], 'zlib binding closed') + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. 
+ const origFlush = this[_handle].flush + this[_handle].flush = (flushFlag, cb) => { + this[_handle].flush = origFlush + this.flush(flushFlag) + cb() + } + this[_handle].params(level, strategy) + /* istanbul ignore else */ + if (this[_handle]) { + this[_level] = level + this[_strategy] = strategy + } + } + } + + reset () { + assert(this[_handle], 'zlib binding closed') + return this[_handle].reset() + } + + flush (kind) { + if (kind === undefined) + kind = constants.Z_FULL_FLUSH + + if (this.ended) + return + + const flushFlag = this[_flushFlag] + this[_flushFlag] = kind + this.write(Buffer.alloc(0)) + this[_flushFlag] = flushFlag + } + + end (chunk, encoding, cb) { + if (chunk) + this.write(chunk, encoding) + this.flush(this[_finishFlush]) + this[_ended] = true + return super.end(null, null, cb) + } + + get ended () { + return this[_ended] + } + + write (chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + + if (typeof chunk === 'string') + chunk = Buffer.from(chunk, encoding) + + assert(this[_handle], 'zlib binding closed') + + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + const nativeHandle = this[_handle]._handle + const originalNativeClose = nativeHandle.close + nativeHandle.close = () => {} + const originalClose = this[_handle].close + this[_handle].close = () => {} + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + Buffer.concat = (args) => args + let result + try { + result = this[_handle]._processChunk(chunk, this[_flushFlag]) + } catch (err) { + this[_onError](err) + } finally { + Buffer.concat = OriginalBufferConcat + if (this[_handle]) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + this[_handle]._handle = nativeHandle + nativeHandle.close = originalNativeClose + this[_handle].close = originalClose + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. + this[_handle].removeAllListeners('error') + } + } + + let writeReturn + if (result) { + if (Array.isArray(result) && result.length > 0) { + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = super.write(Buffer.from(result[0])) + for (let i = 1; i < result.length; i++) { + writeReturn = super.write(result[i]) + } + } else { + writeReturn = super.write(Buffer.from(result)) + } + } + + if (cb) + cb() + return writeReturn + } +} + +// minimal 2-byte header +class Deflate extends Zlib { + constructor (opts) { + super(opts, 'Deflate') + } +} + +class Inflate extends Zlib { + constructor (opts) { + super(opts, 'Inflate') + } +} + +// gzip - bigger header, same deflate compression +class Gzip extends Zlib { + constructor (opts) { + super(opts, 'Gzip') + } +} + +class Gunzip extends Zlib { + constructor (opts) { + super(opts, 'Gunzip') + } +} + +// raw - no header +class DeflateRaw extends Zlib { + constructor (opts) { + super(opts, 'DeflateRaw') + } +} + +class InflateRaw extends Zlib { + constructor (opts) { + super(opts, 'InflateRaw') + } +} + +// auto-detect header. 
+class Unzip extends Zlib { + constructor (opts) { + super(opts, 'Unzip') + } +} + +exports.Deflate = Deflate +exports.Inflate = Inflate +exports.Gzip = Gzip +exports.Gunzip = Gunzip +exports.DeflateRaw = DeflateRaw +exports.InflateRaw = InflateRaw +exports.Unzip = Unzip diff --git a/node_modules/minizlib/package.json b/node_modules/minizlib/package.json new file mode 100644 index 00000000..20a47788 --- /dev/null +++ b/node_modules/minizlib/package.json @@ -0,0 +1,38 @@ +{ + "name": "minizlib", + "version": "1.2.1", + "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.", + "main": "index.js", + "dependencies": { + "minipass": "^2.2.1" + }, + "scripts": { + "test": "tap test/*.js --100 -J", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minizlib.git" + }, + "keywords": [ + "zlib", + "gzip", + "gunzip", + "deflate", + "inflate", + "compression", + "zip", + "unzip" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "MIT", + "devDependencies": { + "tap": "^12.0.1" + }, + "files": [ + "index.js", + "constants.js" + ] +} diff --git a/node_modules/mixin-deep/LICENSE b/node_modules/mixin-deep/LICENSE new file mode 100644 index 00000000..99c93691 --- /dev/null +++ b/node_modules/mixin-deep/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, 2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/mixin-deep/README.md b/node_modules/mixin-deep/README.md new file mode 100644 index 00000000..111bde00 --- /dev/null +++ b/node_modules/mixin-deep/README.md @@ -0,0 +1,80 @@ +# mixin-deep [![NPM version](https://img.shields.io/npm/v/mixin-deep.svg?style=flat)](https://www.npmjs.com/package/mixin-deep) [![NPM monthly downloads](https://img.shields.io/npm/dm/mixin-deep.svg?style=flat)](https://npmjs.org/package/mixin-deep) [![NPM total downloads](https://img.shields.io/npm/dt/mixin-deep.svg?style=flat)](https://npmjs.org/package/mixin-deep) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/mixin-deep.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/mixin-deep) + +> Deeply mix the properties of objects into the first object. Like merge-deep, but doesn't clone. 
+ +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save mixin-deep +``` + +## Usage + +```js +var mixinDeep = require('mixin-deep'); + +mixinDeep({a: {aa: 'aa'}}, {a: {bb: 'bb'}}, {a: {cc: 'cc'}}); +//=> { a: { aa: 'aa', bb: 'bb', cc: 'cc' } } +``` + +## About + +
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
+ +### Related projects + +You might also be interested in these projects: + +* [defaults-deep](https://www.npmjs.com/package/defaults-deep): Like `extend` but recursively copies only the missing properties/values to the target object. | [homepage](https://github.com/jonschlinkert/defaults-deep "Like `extend` but recursively copies only the missing properties/values to the target object.") +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow "Extend an object with the properties of additional objects. node.js/javascript util.") +* [merge-deep](https://www.npmjs.com/package/merge-deep): Recursively merge values in a javascript object. | [homepage](https://github.com/jonschlinkert/merge-deep "Recursively merge values in a javascript object.") +* [mixin-object](https://www.npmjs.com/package/mixin-object): Mixin the own and inherited properties of other objects onto the first object. Pass an… [more](https://github.com/jonschlinkert/mixin-object) | [homepage](https://github.com/jonschlinkert/mixin-object "Mixin the own and inherited properties of other objects onto the first object. Pass an empty object as the first arg to shallow clone.") + +### Author + +**Jon Schlinkert** + +* [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on December 09, 2017._ \ No newline at end of file diff --git a/node_modules/mixin-deep/index.js b/node_modules/mixin-deep/index.js new file mode 100644 index 00000000..47face2a --- /dev/null +++ b/node_modules/mixin-deep/index.js @@ -0,0 +1,64 @@ +'use strict'; + +var isExtendable = require('is-extendable'); +var forIn = require('for-in'); + +function mixinDeep(target, objects) { + var len = arguments.length, i = 0; + while (++i < len) { + var obj = arguments[i]; + if (isObject(obj)) { + forIn(obj, copy, target); + } + } + return target; +} + +/** + * Copy properties from the source object to the + * target object. + * + * @param {*} `val` + * @param {String} `key` + */ + +function copy(val, key) { + if (!isValidKey(key)) { + return; + } + + var obj = this[key]; + if (isObject(val) && isObject(obj)) { + mixinDeep(obj, val); + } else { + this[key] = val; + } +} + +/** + * Returns true if `val` is an object or function. + * + * @param {any} val + * @return {Boolean} + */ + +function isObject(val) { + return isExtendable(val) && !Array.isArray(val); +} + +/** + * Returns true if `key` is a valid key to use when extending objects. + * + * @param {String} `key` + * @return {Boolean} + */ + +function isValidKey(key) { + return key !== '__proto__' && key !== 'constructor' && key !== 'prototype'; +}; + +/** + * Expose `mixinDeep` + */ + +module.exports = mixinDeep; diff --git a/node_modules/mixin-deep/node_modules/is-extendable/LICENSE b/node_modules/mixin-deep/node_modules/is-extendable/LICENSE new file mode 100644 index 00000000..c0d7f136 --- /dev/null +++ b/node_modules/mixin-deep/node_modules/is-extendable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/mixin-deep/node_modules/is-extendable/README.md b/node_modules/mixin-deep/node_modules/is-extendable/README.md new file mode 100644 index 00000000..875b56a7 --- /dev/null +++ b/node_modules/mixin-deep/node_modules/is-extendable/README.md @@ -0,0 +1,88 @@ +# is-extendable [![NPM version](https://img.shields.io/npm/v/is-extendable.svg?style=flat)](https://www.npmjs.com/package/is-extendable) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![NPM total downloads](https://img.shields.io/npm/dt/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-extendable.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-extendable) + +> Returns true if a value is a plain object, array or function. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-extendable +``` + +## Usage + +```js +var isExtendable = require('is-extendable'); +``` + +Returns true if the value is any of the following: + +* array +* plain object +* function + +## Notes + +All objects in JavaScript can have keys, but it's a pain to check for this, since we ether need to verify that the value is not `null` or `undefined` and: + +* the value is not a primitive, or +* that the object is a plain object, function or array + +Also note that an `extendable` object is not the same as an [extensible object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/isExtensible), which is one that (in es6) is not sealed, frozen, or marked as non-extensible using `preventExtensions`. + +## Release history + +### v1.0.0 - 2017/07/20 + +**Breaking changes** + +* No longer considers date, regex or error objects to be extendable + +## About + +### Related projects + +* [assign-deep](https://www.npmjs.com/package/assign-deep): Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target… [more](https://github.com/jonschlinkert/assign-deep) | [homepage](https://github.com/jonschlinkert/assign-deep "Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target (first) object.") +* [is-equal-shallow](https://www.npmjs.com/package/is-equal-shallow): Does a shallow comparison of two objects, returning false if the keys or values differ. 
| [homepage](https://github.com/jonschlinkert/is-equal-shallow "Does a shallow comparison of two objects, returning false if the keys or values differ.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on July 20, 2017._ \ No newline at end of file diff --git a/node_modules/mixin-deep/node_modules/is-extendable/index.d.ts b/node_modules/mixin-deep/node_modules/is-extendable/index.d.ts new file mode 100644 index 00000000..b96d5075 --- /dev/null +++ b/node_modules/mixin-deep/node_modules/is-extendable/index.d.ts @@ -0,0 +1,5 @@ +export = isExtendable; + +declare function isExtendable(val: any): boolean; + +declare namespace isExtendable {} diff --git a/node_modules/mixin-deep/node_modules/is-extendable/index.js b/node_modules/mixin-deep/node_modules/is-extendable/index.js new file mode 100644 index 00000000..a8b26ad0 --- /dev/null +++ b/node_modules/mixin-deep/node_modules/is-extendable/index.js @@ -0,0 +1,14 @@ +/*! + * is-extendable + * + * Copyright (c) 2015-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +var isPlainObject = require('is-plain-object'); + +module.exports = function isExtendable(val) { + return isPlainObject(val) || typeof val === 'function' || Array.isArray(val); +}; diff --git a/node_modules/mixin-deep/node_modules/is-extendable/package.json b/node_modules/mixin-deep/node_modules/is-extendable/package.json new file mode 100644 index 00000000..2aaab65a --- /dev/null +++ b/node_modules/mixin-deep/node_modules/is-extendable/package.json @@ -0,0 +1,67 @@ +{ + "name": "is-extendable", + "description": "Returns true if a value is a plain object, array or function.", + "version": "1.0.1", + "homepage": "https://github.com/jonschlinkert/is-extendable", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extendable", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extendable/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "index.d.ts" + ], + "main": "index.js", + "types": "index.d.ts", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.4.2" + }, + "keywords": [ + "array", + "assign", + "check", + "date", + "extend", + "extendable", + "extensible", + "function", + "is", + "object", + "regex", + "test" + ], + "verb": { + "related": { + "list": [ + "assign-deep", + "is-equal-shallow", + "is-plain-object", + "isobject", + "kind-of" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/mixin-deep/package.json b/node_modules/mixin-deep/package.json new file mode 100644 index 00000000..0581c46a --- /dev/null +++ b/node_modules/mixin-deep/package.json @@ -0,0 +1,65 @@ +{ + "name": "mixin-deep", + "description": "Deeply mix the properties of objects into the first object. 
Like merge-deep, but doesn't clone.", + "version": "1.3.2", + "homepage": "https://github.com/jonschlinkert/mixin-deep", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/mixin-deep", + "bugs": { + "url": "https://github.com/jonschlinkert/mixin-deep/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "for-in": "^1.0.2", + "is-extendable": "^1.0.1" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3", + "should": "^13.1.3" + }, + "keywords": [ + "deep", + "extend", + "key", + "keys", + "merge", + "mixin", + "object", + "prop", + "properties", + "util", + "values" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "defaults-deep", + "extend-shallow", + "merge-deep", + "mixin-object" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/mkdirp/.travis.yml b/node_modules/mkdirp/.travis.yml new file mode 100644 index 00000000..74c57bf1 --- /dev/null +++ b/node_modules/mkdirp/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +node_js: + - "0.8" + - "0.10" + - "0.12" + - "iojs" +before_install: + - npm install -g npm@~1.4.6 diff --git a/node_modules/mkdirp/LICENSE b/node_modules/mkdirp/LICENSE new file mode 100644 index 00000000..432d1aeb --- /dev/null +++ b/node_modules/mkdirp/LICENSE @@ -0,0 +1,21 @@ +Copyright 2010 James Halliday (mail@substack.net) + +This project is free software released under the MIT/X11 license: + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/mkdirp/bin/cmd.js b/node_modules/mkdirp/bin/cmd.js new file mode 100755 index 00000000..d95de15a --- /dev/null +++ b/node_modules/mkdirp/bin/cmd.js @@ -0,0 +1,33 @@ +#!/usr/bin/env node + +var mkdirp = require('../'); +var minimist = require('minimist'); +var fs = require('fs'); + +var argv = minimist(process.argv.slice(2), { + alias: { m: 'mode', h: 'help' }, + string: [ 'mode' ] +}); +if (argv.help) { + fs.createReadStream(__dirname + '/usage.txt').pipe(process.stdout); + return; +} + +var paths = argv._.slice(); +var mode = argv.mode ? 
parseInt(argv.mode, 8) : undefined; + +(function next () { + if (paths.length === 0) return; + var p = paths.shift(); + + if (mode === undefined) mkdirp(p, cb) + else mkdirp(p, mode, cb) + + function cb (err) { + if (err) { + console.error(err.message); + process.exit(1); + } + else next(); + } +})(); diff --git a/node_modules/mkdirp/bin/usage.txt b/node_modules/mkdirp/bin/usage.txt new file mode 100644 index 00000000..f952aa2c --- /dev/null +++ b/node_modules/mkdirp/bin/usage.txt @@ -0,0 +1,12 @@ +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories that + don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m, --mode If a directory needs to be created, set the mode as an octal + permission string. + diff --git a/node_modules/mkdirp/examples/pow.js b/node_modules/mkdirp/examples/pow.js new file mode 100644 index 00000000..e6924212 --- /dev/null +++ b/node_modules/mkdirp/examples/pow.js @@ -0,0 +1,6 @@ +var mkdirp = require('mkdirp'); + +mkdirp('/tmp/foo/bar/baz', function (err) { + if (err) console.error(err) + else console.log('pow!') +}); diff --git a/node_modules/mkdirp/index.js b/node_modules/mkdirp/index.js new file mode 100644 index 00000000..6ce241b5 --- /dev/null +++ b/node_modules/mkdirp/index.js @@ -0,0 +1,98 @@ +var path = require('path'); +var fs = require('fs'); +var _0777 = parseInt('0777', 8); + +module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; + +function mkdirP (p, opts, f, made) { + if (typeof opts === 'function') { + f = opts; + opts = {}; + } + else if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 & (~process.umask()); + } + if (!made) made = null; + + var cb = f || function () {}; + p = path.resolve(p); + + xfs.mkdir(p, mode, function (er) { + if (!er) { + made = made || p; + return cb(null, made); + } + switch (er.code) { + case 'ENOENT': + mkdirP(path.dirname(p), opts, function (er, made) { + if (er) cb(er, made); + else mkdirP(p, opts, cb, made); + }); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + xfs.stat(p, function (er2, stat) { + // if the stat fails, then that's super weird. + // let the original error be the failure reason. + if (er2 || !stat.isDirectory()) cb(er, made) + else cb(null, made); + }); + break; + } + }); +} + +mkdirP.sync = function sync (p, opts, made) { + if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 & (~process.umask()); + } + if (!made) made = null; + + p = path.resolve(p); + + try { + xfs.mkdirSync(p, mode); + made = made || p; + } + catch (err0) { + switch (err0.code) { + case 'ENOENT' : + made = sync(path.dirname(p), opts, made); + sync(p, opts, made); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. 
+ default: + var stat; + try { + stat = xfs.statSync(p); + } + catch (err1) { + throw err0; + } + if (!stat.isDirectory()) throw err0; + break; + } + } + + return made; +}; diff --git a/node_modules/mkdirp/package.json b/node_modules/mkdirp/package.json new file mode 100644 index 00000000..863e860d --- /dev/null +++ b/node_modules/mkdirp/package.json @@ -0,0 +1,27 @@ +{ + "name": "mkdirp", + "description": "Recursively mkdir, like `mkdir -p`", + "version": "0.5.1", + "author": "James Halliday (http://substack.net)", + "main": "index.js", + "keywords": [ + "mkdir", + "directory" + ], + "repository": { + "type": "git", + "url": "https://github.com/substack/node-mkdirp.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "dependencies": { + "minimist": "0.0.8" + }, + "devDependencies": { + "tap": "1", + "mock-fs": "2 >=2.7.0" + }, + "bin": "bin/cmd.js", + "license": "MIT" +} diff --git a/node_modules/mkdirp/readme.markdown b/node_modules/mkdirp/readme.markdown new file mode 100644 index 00000000..3cc13153 --- /dev/null +++ b/node_modules/mkdirp/readme.markdown @@ -0,0 +1,100 @@ +# mkdirp + +Like `mkdir -p`, but in node.js! + +[![build status](https://secure.travis-ci.org/substack/node-mkdirp.png)](http://travis-ci.org/substack/node-mkdirp) + +# example + +## pow.js + +```js +var mkdirp = require('mkdirp'); + +mkdirp('/tmp/foo/bar/baz', function (err) { + if (err) console.error(err) + else console.log('pow!') +}); +``` + +Output + +``` +pow! +``` + +And now /tmp/foo/bar/baz exists, huzzah! + +# methods + +```js +var mkdirp = require('mkdirp'); +``` + +## mkdirp(dir, opts, cb) + +Create a new directory and any necessary subdirectories at `dir` with octal +permission string `opts.mode`. If `opts` is a non-object, it will be treated as +the `opts.mode`. + +If `opts.mode` isn't specified, it defaults to `0777 & (~process.umask())`. + +`cb(err, made)` fires with the error or the first directory `made` +that had to be created, if any. + +You can optionally pass in an alternate `fs` implementation by passing in +`opts.fs`. Your implementation should have `opts.fs.mkdir(path, mode, cb)` and +`opts.fs.stat(path, cb)`. + +## mkdirp.sync(dir, opts) + +Synchronously create a new directory and any necessary subdirectories at `dir` +with octal permission string `opts.mode`. If `opts` is a non-object, it will be +treated as the `opts.mode`. + +If `opts.mode` isn't specified, it defaults to `0777 & (~process.umask())`. + +Returns the first directory that had to be created, if any. + +You can optionally pass in an alternate `fs` implementation by passing in +`opts.fs`. Your implementation should have `opts.fs.mkdirSync(path, mode)` and +`opts.fs.statSync(path)`. + +# usage + +This package also ships with a `mkdirp` command. + +``` +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories that + don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m, --mode If a directory needs to be created, set the mode as an octal + permission string. + +``` + +# install + +With [npm](http://npmjs.org) do: + +``` +npm install mkdirp +``` + +to get the library, or + +``` +npm install -g mkdirp +``` + +to get the command. 
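As a minimal sketch (assuming `mock-fs` is available, as in the bundled tests), the `opts.fs` and `opts.mode` options described above can be exercised like this:

```js
// Hypothetical usage sketch: pass an alternate fs implementation via opts.fs
// and an explicit octal mode, then use the sync form.
var mkdirp = require('mkdirp');
var mockfs = require('mock-fs');

var xfs = mockfs.fs(); // in-memory fs, as in test/opts_fs.js

mkdirp('/beep/boop/baz', { fs: xfs, mode: parseInt('0755', 8) }, function (err, made) {
  if (err) throw err;
  // `made` is the first directory that had to be created ('/beep' on an empty mock fs)
  console.log('created from', made);
});

// The sync form returns the first created directory, or null if nothing was created.
var made = mkdirp.sync('/tmp/foo/bar/baz', parseInt('0755', 8));
console.log(made);
```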
+ +# license + +MIT diff --git a/node_modules/mkdirp/test/chmod.js b/node_modules/mkdirp/test/chmod.js new file mode 100644 index 00000000..6a404b93 --- /dev/null +++ b/node_modules/mkdirp/test/chmod.js @@ -0,0 +1,41 @@ +var mkdirp = require('../').mkdirp; +var path = require('path'); +var fs = require('fs'); +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); +var _0744 = parseInt('0744', 8); + +var ps = [ '', 'tmp' ]; + +for (var i = 0; i < 25; i++) { + var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + ps.push(dir); +} + +var file = ps.join('/'); + +test('chmod-pre', function (t) { + var mode = _0744 + mkdirp(file, mode, function (er) { + t.ifError(er, 'should not error'); + fs.stat(file, function (er, stat) { + t.ifError(er, 'should exist'); + t.ok(stat && stat.isDirectory(), 'should be directory'); + t.equal(stat && stat.mode & _0777, mode, 'should be 0744'); + t.end(); + }); + }); +}); + +test('chmod', function (t) { + var mode = _0755 + mkdirp(file, mode, function (er) { + t.ifError(er, 'should not error'); + fs.stat(file, function (er, stat) { + t.ifError(er, 'should exist'); + t.ok(stat && stat.isDirectory(), 'should be directory'); + t.end(); + }); + }); +}); diff --git a/node_modules/mkdirp/test/clobber.js b/node_modules/mkdirp/test/clobber.js new file mode 100644 index 00000000..2433b9ad --- /dev/null +++ b/node_modules/mkdirp/test/clobber.js @@ -0,0 +1,38 @@ +var mkdirp = require('../').mkdirp; +var path = require('path'); +var fs = require('fs'); +var test = require('tap').test; +var _0755 = parseInt('0755', 8); + +var ps = [ '', 'tmp' ]; + +for (var i = 0; i < 25; i++) { + var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + ps.push(dir); +} + +var file = ps.join('/'); + +// a file in the way +var itw = ps.slice(0, 3).join('/'); + + +test('clobber-pre', function (t) { + console.error("about to write to "+itw) + fs.writeFileSync(itw, 'I AM IN THE WAY, THE TRUTH, AND THE LIGHT.'); + + fs.stat(itw, function (er, stat) { + t.ifError(er) + t.ok(stat && stat.isFile(), 'should be file') + t.end() + }) +}) + +test('clobber', function (t) { + t.plan(2); + mkdirp(file, _0755, function (err) { + t.ok(err); + t.equal(err.code, 'ENOTDIR'); + t.end(); + }); +}); diff --git a/node_modules/mkdirp/test/mkdirp.js b/node_modules/mkdirp/test/mkdirp.js new file mode 100644 index 00000000..eaa8921c --- /dev/null +++ b/node_modules/mkdirp/test/mkdirp.js @@ -0,0 +1,28 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('woo', function (t) { + t.plan(5); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + mkdirp(file, _0755, function (err) { + t.ifError(err); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }) + }) + }); +}); diff --git a/node_modules/mkdirp/test/opts_fs.js b/node_modules/mkdirp/test/opts_fs.js new file mode 100644 index 00000000..97186b62 --- /dev/null +++ b/node_modules/mkdirp/test/opts_fs.js @@ -0,0 +1,29 @@ +var mkdirp = require('../'); +var path = require('path'); 
+var test = require('tap').test; +var mockfs = require('mock-fs'); +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('opts.fs', function (t) { + t.plan(5); + + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/beep/boop/' + [x,y,z].join('/'); + var xfs = mockfs.fs(); + + mkdirp(file, { fs: xfs, mode: _0755 }, function (err) { + t.ifError(err); + xfs.exists(file, function (ex) { + t.ok(ex, 'created file'); + xfs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }); + }); +}); diff --git a/node_modules/mkdirp/test/opts_fs_sync.js b/node_modules/mkdirp/test/opts_fs_sync.js new file mode 100644 index 00000000..6c370aa6 --- /dev/null +++ b/node_modules/mkdirp/test/opts_fs_sync.js @@ -0,0 +1,27 @@ +var mkdirp = require('../'); +var path = require('path'); +var test = require('tap').test; +var mockfs = require('mock-fs'); +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('opts.fs sync', function (t) { + t.plan(4); + + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/beep/boop/' + [x,y,z].join('/'); + var xfs = mockfs.fs(); + + mkdirp.sync(file, { fs: xfs, mode: _0755 }); + xfs.exists(file, function (ex) { + t.ok(ex, 'created file'); + xfs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }); +}); diff --git a/node_modules/mkdirp/test/perm.js b/node_modules/mkdirp/test/perm.js new file mode 100644 index 00000000..fbce44b8 --- /dev/null +++ b/node_modules/mkdirp/test/perm.js @@ -0,0 +1,32 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('async perm', function (t) { + t.plan(5); + var file = '/tmp/' + (Math.random() * (1<<30)).toString(16); + + mkdirp(file, _0755, function (err) { + t.ifError(err); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }) + }) + }); +}); + +test('async root perm', function (t) { + mkdirp('/tmp', _0755, function (err) { + if (err) t.fail(err); + t.end(); + }); + t.end(); +}); diff --git a/node_modules/mkdirp/test/perm_sync.js b/node_modules/mkdirp/test/perm_sync.js new file mode 100644 index 00000000..398229fe --- /dev/null +++ b/node_modules/mkdirp/test/perm_sync.js @@ -0,0 +1,36 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('sync perm', function (t) { + t.plan(4); + var file = '/tmp/' + (Math.random() * (1<<30)).toString(16) + '.json'; + + mkdirp.sync(file, _0755); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); 
+ }); + }); +}); + +test('sync root perm', function (t) { + t.plan(3); + + var file = '/tmp'; + mkdirp.sync(file, _0755); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.ok(stat.isDirectory(), 'target not a directory'); + }) + }); +}); diff --git a/node_modules/mkdirp/test/race.js b/node_modules/mkdirp/test/race.js new file mode 100644 index 00000000..b0b9e183 --- /dev/null +++ b/node_modules/mkdirp/test/race.js @@ -0,0 +1,37 @@ +var mkdirp = require('../').mkdirp; +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('race', function (t) { + t.plan(10); + var ps = [ '', 'tmp' ]; + + for (var i = 0; i < 25; i++) { + var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + ps.push(dir); + } + var file = ps.join('/'); + + var res = 2; + mk(file); + + mk(file); + + function mk (file, cb) { + mkdirp(file, _0755, function (err) { + t.ifError(err); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }) + }); + } +}); diff --git a/node_modules/mkdirp/test/rel.js b/node_modules/mkdirp/test/rel.js new file mode 100644 index 00000000..4ddb3427 --- /dev/null +++ b/node_modules/mkdirp/test/rel.js @@ -0,0 +1,32 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('rel', function (t) { + t.plan(5); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var cwd = process.cwd(); + process.chdir('/tmp'); + + var file = [x,y,z].join('/'); + + mkdirp(file, _0755, function (err) { + t.ifError(err); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + process.chdir(cwd); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }) + }) + }); +}); diff --git a/node_modules/mkdirp/test/return.js b/node_modules/mkdirp/test/return.js new file mode 100644 index 00000000..bce68e56 --- /dev/null +++ b/node_modules/mkdirp/test/return.js @@ -0,0 +1,25 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var test = require('tap').test; + +test('return value', function (t) { + t.plan(4); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + // should return the first dir created. + // By this point, it would be profoundly surprising if /tmp didn't + // already exist, since every other test makes things in there. 
+ mkdirp(file, function (err, made) { + t.ifError(err); + t.equal(made, '/tmp/' + x); + mkdirp(file, function (err, made) { + t.ifError(err); + t.equal(made, null); + }); + }); +}); diff --git a/node_modules/mkdirp/test/return_sync.js b/node_modules/mkdirp/test/return_sync.js new file mode 100644 index 00000000..7c222d35 --- /dev/null +++ b/node_modules/mkdirp/test/return_sync.js @@ -0,0 +1,24 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var test = require('tap').test; + +test('return value', function (t) { + t.plan(2); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + // should return the first dir created. + // By this point, it would be profoundly surprising if /tmp didn't + // already exist, since every other test makes things in there. + // Note that this will throw on failure, which will fail the test. + var made = mkdirp.sync(file); + t.equal(made, '/tmp/' + x); + + // making the same file again should have no effect. + made = mkdirp.sync(file); + t.equal(made, null); +}); diff --git a/node_modules/mkdirp/test/root.js b/node_modules/mkdirp/test/root.js new file mode 100644 index 00000000..9e7d079d --- /dev/null +++ b/node_modules/mkdirp/test/root.js @@ -0,0 +1,19 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var test = require('tap').test; +var _0755 = parseInt('0755', 8); + +test('root', function (t) { + // '/' on unix, 'c:/' on windows. + var file = path.resolve('/'); + + mkdirp(file, _0755, function (err) { + if (err) throw err + fs.stat(file, function (er, stat) { + if (er) throw er + t.ok(stat.isDirectory(), 'target is a directory'); + t.end(); + }) + }); +}); diff --git a/node_modules/mkdirp/test/sync.js b/node_modules/mkdirp/test/sync.js new file mode 100644 index 00000000..8c8dc938 --- /dev/null +++ b/node_modules/mkdirp/test/sync.js @@ -0,0 +1,32 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('sync', function (t) { + t.plan(4); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + try { + mkdirp.sync(file, _0755); + } catch (err) { + t.fail(err); + return t.end(); + } + + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }); +}); diff --git a/node_modules/mkdirp/test/umask.js b/node_modules/mkdirp/test/umask.js new file mode 100644 index 00000000..2033c63a --- /dev/null +++ b/node_modules/mkdirp/test/umask.js @@ -0,0 +1,28 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('implicit mode from umask', function (t) { + t.plan(5); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = 
Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + mkdirp(file, function (err) { + t.ifError(err); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0777 & (~process.umask())); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }) + }); +}); diff --git a/node_modules/mkdirp/test/umask_sync.js b/node_modules/mkdirp/test/umask_sync.js new file mode 100644 index 00000000..11a76147 --- /dev/null +++ b/node_modules/mkdirp/test/umask_sync.js @@ -0,0 +1,32 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('umask sync modes', function (t) { + t.plan(4); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + try { + mkdirp.sync(file); + } catch (err) { + t.fail(err); + return t.end(); + } + + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, (_0777 & (~process.umask()))); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }); +}); diff --git a/node_modules/ms/index.js b/node_modules/ms/index.js new file mode 100644 index 00000000..6a522b16 --- /dev/null +++ b/node_modules/ms/index.js @@ -0,0 +1,152 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isNaN(val) === false) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. 
+ * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + if (ms >= d) { + return Math.round(ms / d) + 'd'; + } + if (ms >= h) { + return Math.round(ms / h) + 'h'; + } + if (ms >= m) { + return Math.round(ms / m) + 'm'; + } + if (ms >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + return plural(ms, d, 'day') || + plural(ms, h, 'hour') || + plural(ms, m, 'minute') || + plural(ms, s, 'second') || + ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, n, name) { + if (ms < n) { + return; + } + if (ms < n * 1.5) { + return Math.floor(ms / n) + ' ' + name; + } + return Math.ceil(ms / n) + ' ' + name + 's'; +} diff --git a/node_modules/ms/license.md b/node_modules/ms/license.md new file mode 100644 index 00000000..69b61253 --- /dev/null +++ b/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/ms/package.json b/node_modules/ms/package.json new file mode 100644 index 00000000..6a31c81f --- /dev/null +++ b/node_modules/ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "ms", + "version": "2.0.0", + "description": "Tiny milisecond conversion utility", + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "3.19.0", + "expect.js": "0.3.1", + "husky": "0.13.3", + "lint-staged": "3.4.1", + "mocha": "3.4.1" + } +} diff --git a/node_modules/ms/readme.md b/node_modules/ms/readme.md new file mode 100644 index 00000000..84a9974c --- /dev/null +++ b/node_modules/ms/readme.md @@ -0,0 +1,51 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Slack Channel](http://zeit-slackin.now.sh/badge.svg)](https://zeit.chat/) + +Use this package to easily convert various time formats to milliseconds. 
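As a minimal sketch of the dispatch rules implemented in `index.js` above: strings are parsed to milliseconds, finite numbers are formatted back to strings, and anything else throws.

```js
// Hypothetical usage sketch based on the parse/format helpers shown above.
var ms = require('ms');

console.log(ms('1.5h'));                // 5400000   (string -> number)
console.log(ms(98765));                 // "2m"      (number -> short string)
console.log(ms(98765, { long: true })); // "2 minutes"

try {
  ms({}); // neither a non-empty string nor a finite number
} catch (err) {
  console.error(err.message); // "val is not a non-empty string or a valid number. val={}"
}
```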
+ +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +``` + +### Convert from milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(ms('10 hours')) // "10h" +``` + +### Time format written-out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [node](https://nodejs.org) and in the browser. +- If a number is supplied to `ms`, a string with a unit is returned. +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`). +- If you pass a string with a number and a valid unit, the number of equivalent ms is returned. + +## Caught a bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, node will now use your clone of ms! + +As always, you can run the tests using: `npm test` diff --git a/node_modules/mz/HISTORY.md b/node_modules/mz/HISTORY.md new file mode 100644 index 00000000..6ebee21d --- /dev/null +++ b/node_modules/mz/HISTORY.md @@ -0,0 +1,66 @@ + +2.7.0 / 2017-09-13 +================== + + * feat: support fs.copyFile (#58) + +2.6.0 / 2016-11-22 +================== + + * Added fdatasync to fs api (#46) + +2.5.0 / 2016-11-04 +================== + + * feat: support fs.mkdtemp + +2.4.0 / 2016-03-23 +================== + + * add `fs.truncate()` [#34](https://github.com/normalize/mz/pull/34) + +2.3.1 / 2016-02-01 +================== + + * update `any-promise@v1` + +2.3.0 / 2016-01-30 +================== + + * feat(package): switch to `any-promise` to support more promise engines + +2.2.0 / 2016-01-24 +================== + + * feat(package): add index.js to files + +2.1.0 / 2015-10-15 +================== + + * support for readline library + +2.0.0 / 2015-05-24 +================== + + * support callbacks as well + +1.2.0 / 2014-12-16 +================== + + * refactor promisification to `thenify` and `thenify-all` + +1.1.0 / 2014-11-14 +================== + + * use `graceful-fs` if available + +1.0.1 / 2014-08-18 +================== + + * don't use `bluebird.promisify()` - unnecessarily wraps runtime errors, causing issues + +1.0.0 / 2014-06-18 +================== + + * use `bluebird` by default if found + * support node 0.8 diff --git a/node_modules/mz/LICENSE b/node_modules/mz/LICENSE new file mode 100644 index 00000000..1835f3d9 --- /dev/null +++ b/node_modules/mz/LICENSE @@ -0,0 +1,22 @@ + +The MIT License (MIT) + +Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and 
this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/mz/README.md b/node_modules/mz/README.md new file mode 100644 index 00000000..50d6557c --- /dev/null +++ b/node_modules/mz/README.md @@ -0,0 +1,106 @@ + +# MZ - Modernize node.js + +[![NPM version][npm-image]][npm-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] +[![Dependency Status][david-image]][david-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +Modernize node.js to current ECMAScript specifications! +node.js will not update their API to ES6+ [for a while](https://github.com/joyent/node/issues/7549). +This library is a wrapper for various aspects of node.js' API. + +## Installation and Usage + +Set `mz` as a dependency and install it. + +```bash +npm i mz +``` + +Then prefix the relevant `require()`s with `mz/`: + +```js +var fs = require('mz/fs') + +fs.exists(__filename).then(function (exists) { + if (exists) // do something +}) +``` + +With ES2017, this will allow you to use async functions cleanly with node's core API: + +```js +const fs = require('mz/fs') + + +async function doSomething () { + if (await fs.exists(__filename)) // do something +} +``` + +## Promisification + +Many node methods are converted into promises. +Any properties that are deprecated or aren't asynchronous will simply be proxied. +The modules wrapped are: + +- `child_process` +- `crypto` +- `dns` +- `fs` (uses `graceful-fs` if available) +- `readline` +- `zlib` + +```js +var exec = require('mz/child_process').exec + +exec('node --version').then(function (stdout) { + console.log(stdout) +}) +``` + +## Promise Engine + +`mz` uses [`any-promise`](https://github.com/kevinbeaty/any-promise). + +## FAQ + +### Can I use this in production? + +Yes, Node 4.x ships with stable promises support. For older engines, +you should probably install your own promise implementation and register it with +`require('any-promise/register')('bluebird')`. + +### Will this make my app faster? + +Nope, probably slower actually. + +### Can I add more features? + +Sure. +Open an issue. 
+ +Currently, the plans are to eventually support: + +- New APIs in node.js that are not available in older versions of node +- ECMAScript7 Streams + +[bluebird]: https://github.com/petkaantonov/bluebird + +[npm-image]: https://img.shields.io/npm/v/mz.svg?style=flat-square +[npm-url]: https://npmjs.org/package/mz +[github-tag]: http://img.shields.io/github/tag/normalize/mz.svg?style=flat-square +[github-url]: https://github.com/normalize/mz/tags +[travis-image]: https://img.shields.io/travis/normalize/mz.svg?style=flat-square +[travis-url]: https://travis-ci.org/normalize/mz +[coveralls-image]: https://img.shields.io/coveralls/normalize/mz.svg?style=flat-square +[coveralls-url]: https://coveralls.io/r/normalize/mz?branch=master +[david-image]: http://img.shields.io/david/normalize/mz.svg?style=flat-square +[david-url]: https://david-dm.org/normalize/mz +[license-image]: http://img.shields.io/npm/l/mz.svg?style=flat-square +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/mz.svg?style=flat-square +[downloads-url]: https://npmjs.org/package/mz diff --git a/node_modules/mz/child_process.js b/node_modules/mz/child_process.js new file mode 100644 index 00000000..06d5d9e3 --- /dev/null +++ b/node_modules/mz/child_process.js @@ -0,0 +1,8 @@ + +require('thenify-all').withCallback( + require('child_process'), + exports, [ + 'exec', + 'execFile', + ] +) diff --git a/node_modules/mz/crypto.js b/node_modules/mz/crypto.js new file mode 100644 index 00000000..d8cff57f --- /dev/null +++ b/node_modules/mz/crypto.js @@ -0,0 +1,9 @@ + +require('thenify-all').withCallback( + require('crypto'), + exports, [ + 'pbkdf2', + 'pseudoRandomBytes', + 'randomBytes' + ] +) diff --git a/node_modules/mz/dns.js b/node_modules/mz/dns.js new file mode 100644 index 00000000..c1035822 --- /dev/null +++ b/node_modules/mz/dns.js @@ -0,0 +1,16 @@ + +require('thenify-all').withCallback( + require('dns'), + exports, [ + 'lookup', + 'resolve', + 'resolve4', + 'resolve6', + 'resolveCname', + 'resolveMx', + 'resolveNs', + 'resolveSrv', + 'resolveTxt', + 'reverse' + ] +) diff --git a/node_modules/mz/fs.js b/node_modules/mz/fs.js new file mode 100644 index 00000000..1cfd2d77 --- /dev/null +++ b/node_modules/mz/fs.js @@ -0,0 +1,62 @@ + +var Promise = require('any-promise') +var fs +try { + fs = require('graceful-fs') +} catch(err) { + fs = require('fs') +} + +var api = [ + 'appendFile', + 'chmod', + 'chown', + 'close', + 'fchmod', + 'fchown', + 'fdatasync', + 'fstat', + 'fsync', + 'ftruncate', + 'futimes', + 'lchown', + 'link', + 'lstat', + 'mkdir', + 'open', + 'read', + 'readFile', + 'readdir', + 'readlink', + 'realpath', + 'rename', + 'rmdir', + 'stat', + 'symlink', + 'truncate', + 'unlink', + 'utimes', + 'write', + 'writeFile' +] + +typeof fs.access === 'function' && api.push('access') +typeof fs.copyFile === 'function' && api.push('copyFile') +typeof fs.mkdtemp === 'function' && api.push('mkdtemp') + +require('thenify-all').withCallback(fs, exports, api) + +exports.exists = function (filename, callback) { + // callback + if (typeof callback === 'function') { + return fs.stat(filename, function (err) { + callback(null, !err); + }) + } + // or promise + return new Promise(function (resolve) { + fs.stat(filename, function (err) { + resolve(!err) + }) + }) +} diff --git a/node_modules/mz/index.js b/node_modules/mz/index.js new file mode 100644 index 00000000..cef508dc --- /dev/null +++ b/node_modules/mz/index.js @@ -0,0 +1,8 @@ +module.exports = { + fs: require('./fs'), + dns: require('./dns'), + zlib: 
require('./zlib'), + crypto: require('./crypto'), + readline: require('./readline'), + child_process: require('./child_process') +} diff --git a/node_modules/mz/package.json b/node_modules/mz/package.json new file mode 100644 index 00000000..de8d542c --- /dev/null +++ b/node_modules/mz/package.json @@ -0,0 +1,44 @@ +{ + "name": "mz", + "description": "modernize node.js to current ECMAScript standards", + "version": "2.7.0", + "author": { + "name": "Jonathan Ong", + "email": "me@jongleberry.com", + "url": "http://jongleberry.com", + "twitter": "https://twitter.com/jongleberry" + }, + "license": "MIT", + "repository": "normalize/mz", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + }, + "devDependencies": { + "istanbul": "^0.4.0", + "bluebird": "^3.0.0", + "mocha": "^3.0.0" + }, + "scripts": { + "test": "mocha --reporter spec", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter dot" + }, + "keywords": [ + "promisify", + "promise", + "thenify", + "then", + "es6" + ], + "files": [ + "index.js", + "child_process.js", + "crypto.js", + "dns.js", + "fs.js", + "readline.js", + "zlib.js" + ] +} diff --git a/node_modules/mz/readline.js b/node_modules/mz/readline.js new file mode 100644 index 00000000..eb70c46f --- /dev/null +++ b/node_modules/mz/readline.js @@ -0,0 +1,64 @@ +var readline = require('readline') +var Promise = require('any-promise') +var objectAssign = require('object-assign') +var Interface = readline.Interface + +function wrapCompleter (completer) { + if (completer.length === 2) return completer + + return function (line, cb) { + var result = completer(line) + + if (typeof result.then !== 'function') { + return cb(null, result) + } + + result.catch(cb).then(function (result) { + process.nextTick(function () { cb(null, result) }) + }) + } +} + +function InterfaceAsPromised (input, output, completer, terminal) { + if (arguments.length === 1) { + var options = input + + if (typeof options.completer === 'function') { + options = objectAssign({}, options, { + completer: wrapCompleter(options.completer) + }) + } + + Interface.call(this, options) + } else { + if (typeof completer === 'function') { + completer = wrapCompleter(completer) + } + + Interface.call(this, input, output, completer, terminal) + } +} + +InterfaceAsPromised.prototype = Object.create(Interface.prototype) + +InterfaceAsPromised.prototype.question = function (question, callback) { + if (typeof callback === 'function') { + return Interface.prototype.question.call(this, question, callback) + } + + var self = this + return new Promise(function (resolve) { + Interface.prototype.question.call(self, question, resolve) + }) +} + +objectAssign(exports, readline, { + Interface: InterfaceAsPromised, + createInterface: function (input, output, completer, terminal) { + if (arguments.length === 1) { + return new InterfaceAsPromised(input) + } + + return new InterfaceAsPromised(input, output, completer, terminal) + } +}) diff --git a/node_modules/mz/zlib.js b/node_modules/mz/zlib.js new file mode 100644 index 00000000..a05c26a6 --- /dev/null +++ b/node_modules/mz/zlib.js @@ -0,0 +1,13 @@ + +require('thenify-all').withCallback( + require('zlib'), + exports, [ + 'deflate', + 'deflateRaw', + 'gzip', + 'gunzip', + 'inflate', + 'inflateRaw', + 'unzip', + ] +) diff --git a/node_modules/nan/CHANGELOG.md b/node_modules/nan/CHANGELOG.md new file mode 100644 index 
00000000..7cdfbe77 --- /dev/null +++ b/node_modules/nan/CHANGELOG.md @@ -0,0 +1,529 @@ +# NAN ChangeLog + +**Version 2.14.0: current Node 12.2.0, Node 0.12: 0.12.18, Node 0.10: 0.10.48, iojs: 3.3.1** + +### 2.14.0 May 16 2019 + + - Feature: Add missing methods to Nan::Maybe (#852) 4e962489fb84a184035b9fa74f245f650249aca6 + +### 2.13.2 Mar 24 2019 + + - Bugfix: remove usage of deprecated `IsNearDeath` (#842) fbaf42252af279c3d867c6b193571f9711c39847 + +### 2.13.1 Mar 14 2019 + + - Bugfix: check V8 version directly instead of inferring from NMV (#840) 12f9df9f393285de8fb4a8cd01478dc4fe3b089d + +### 2.13.0 Mar 13 2019 + + - Feature: add support for node master (#831) 113c0282072e7ff4f9dfc98b432fd894b798c2c + +### 2.12.1 Dec 18 2018 + + - Bugfix: Fix build breakage with Node.js 10.0.0-10.9.0. (#833) 625e90e8fef8d39ffa7247250a76a100b2487474 + +### 2.12.0 Dec 16 2018 + + - Bugfix: Add scope.Escape() to Call() (#817) 2e5ed4fc3a8ac80a6ef1f2a55099ab3ac8800dc6 + - Bugfix: Fix Node.js v10.12.0 deprecation warnings. 509859cc23b1770376b56550a027840a2ce0f73d + - Feature: Allow SetWeak() for non-object persistent handles. (#824) e6ef6a48e7e671fe3e4b7dddaa8912a3f8262ecd + +### 2.11.1 Sep 29 2018 + + - Fix: adapt to V8 7.0 24a22c3b25eeeec2016c6ec239bdd6169e985447 + +### 2.11.0 Aug 25 2018 + + - Removal: remove `FunctionCallbackInfo::Callee` for nodejs `>= 10` 1a56c0a6efd4fac944cb46c30912a8e023bda7d4 + - Bugfix: Fix `AsyncProgressWorkerBase::WorkProgress` sends invalid data b0c764d1dab11e9f8b37ffb81e2560a4498aad5e + - Feature: Introduce `GetCurrentEventLoop` b4911b0bb1f6d47d860e10ec014d941c51efac5e + - Feature: Add `NAN_MODULE_WORKER_ENABLED` macro as a replacement for `NAN_MODULE` b058fb047d18a58250e66ae831444441c1f2ac7a + +### 2.10.0 Mar 16 2018 + + - Deprecation: Deprecate `MakeCallback` 5e92b19a59e194241d6a658bd6ff7bfbda372950 + - Feature: add `Nan::Call` overload 4482e1242fe124d166fc1a5b2be3c1cc849fe452 + - Feature: add more `Nan::Call` overloads 8584e63e6d04c7d2eb8c4a664e4ef57d70bf672b + - Feature: Fix deprecation warnings for Node 10 1caf258243b0602ed56922bde74f1c91b0cbcb6a + +### 2.9.2 Feb 22 2018 + + - Bugfix: Bandaid for async hooks 212bd2f849be14ef1b02fc85010b053daa24252b + +### 2.9.1 Feb 22 2018 + + - Bugfix: Avoid deprecation warnings in deprecated `Nan::Callback::operator()` 372b14d91289df4604b0f81780709708c45a9aa4 + - Bugfix: Avoid deprecation warnings in `Nan::JSON` 3bc294bce0b7d0a3ee4559926303e5ed4866fda2 + +### 2.9.0 Feb 22 2018 + + - Deprecation: Deprecate legacy `Callback::Call` 6dd5fa690af61ca3523004b433304c581b3ea309 + - Feature: introduce `AsyncResource` class 90c0a179c0d8cb5fd26f1a7d2b1d6231eb402d48o + - Feature: Add context aware `Nan::Callback::Call` functions 7169e09fb088418b6e388222e88b4c13f07ebaee + - Feature: Make `AsyncWorker` context aware 066ba21a6fb9e2b5230c9ed3a6fc51f1211736a4 + - Feature: add `Callback` overload to `Nan::Call` 5328daf66e202658c1dc0d916c3aaba99b3cc606 + - Bugfix: fix warning: suggest parentheses around `&&` within `||` b2bb63d68b8ae623a526b542764e1ac82319cb2c + - Bugfix: Fix compilation on io.js 3 d06114dba0a522fb436f0c5f47b994210968cd7b + +### 2.8.0 Nov 15 2017 + + - Deprecation: Deprecate `Nan::ForceSet` in favor of `Nan::DefineOwnProperty()` 95cbb976d6fbbba88ba0f86dd188223a8591b4e7 + - Feature: Add `Nan::AsyncProgressQueueWorker` a976636ecc2ef617d1b061ce4a6edf39923691cb + - Feature: Add `Nan::DefineOwnProperty()` 95cbb976d6fbbba88ba0f86dd188223a8591b4e7 + - Bugfix: Fix compiling on io.js 1 & 2 82705a64503ce60c62e98df5bd02972bba090900 + - Bugfix: Use 
DefineOwnProperty instead of ForceSet 95cbb976d6fbbba88ba0f86dd188223a8591b4e7 + +### 2.7.0 Aug 30 2017 + + - Feature: Add `Nan::To()` overload. b93280670c9f6da42ed4cf6cbf085ffdd87bd65b + - Bugfix: Fix ternary in `Nan::MaybeLocal::FromMaybe()`. 79a26f7d362e756a9524e672a82c3d603b542867 + +### 2.6.2 Apr 12 2017 + + - Bugfix: Fix v8::JSON::Parse() deprecation warning. 87f6a3c65815fa062296a994cc863e2fa124867d + +### 2.6.1 Apr 6 2017 + + - Bugfix: nan_json.h: fix build breakage in Node 6 ac8d47dc3c10bfbf3f15a6b951633120c0ee6d51 + +### 2.6.0 Apr 6 2017 + + - Feature: nan: add support for JSON::Parse & Stringify b533226c629cce70e1932a873bb6f849044a56c5 + +### 2.5.1 Jan 23 2017 + + - Bugfix: Fix disappearing handle for private value 6a80995694f162ef63dbc9948fbefd45d4485aa0 + - Bugfix: Add missing scopes a93b8bae6bc7d32a170db6e89228b7f60ee57112 + - Bugfix: Use string::data instead of string::front in NewOneByteString d5f920371e67e1f3b268295daee6e83af86b6e50 + +### 2.5.0 Dec 21 2016 + + - Feature: Support Private accessors a86255cb357e8ad8ccbf1f6a4a901c921e39a178 + - Bugfix: Abort in delete operators that shouldn't be called 0fe38215ff8581703967dfd26c12793feb960018 + +### 2.4.0 Jul 10 2016 + + - Feature: Rewrite Callback to add Callback::Reset c4cf44d61f8275cd5f7b0c911d7a806d4004f649 + - Feature: AsyncProgressWorker: add template types for .send 1242c9a11a7ed481c8f08ec06316385cacc513d0 + - Bugfix: Add constness to old Persistent comparison operators bd43cb9982c7639605d60fd073efe8cae165d9b2 + +### 2.3.5 May 31 2016 + + - Bugfix: Replace NAN_INLINE with 'inline' keyword. 71819d8725f822990f439479c9aba3b240804909 + +### 2.3.4 May 31 2016 + + - Bugfix: Remove V8 deprecation warnings 0592fb0a47f3a1c7763087ebea8e1138829f24f9 + - Bugfix: Fix new versions not to use WeakCallbackInfo::IsFirstPass 615c19d9e03d4be2049c10db0151edbc3b229246 + - Bugfix: Make ObjectWrap::handle() const d19af99595587fe7a26bd850af6595c2a7145afc + - Bugfix: Fix compilation errors related to 0592fb0a47f3a1c7763087ebea8e1138829f24f9 e9191c525b94f652718325e28610a1adcf90fed8 + +### 2.3.3 May 4 2016 + + - Bugfix: Refactor SetMethod() to deal with v8::Templates (#566) b9083cf6d5de6ebe6bcb49c7502fbb7c0d9ddda8 + +### 2.3.2 Apr 27 2016 + + - Bugfix: Fix compilation on outdated versions due to Handle removal f8b7c875d04d425a41dfd4f3f8345bc3a11e6c52 + +### 2.3.1 Apr 27 2016 + + - Bugfix: Don't use deprecated v8::Template::Set() in SetMethod a90951e9ea70fa1b3836af4b925322919159100e + +### 2.3.0 Apr 27 2016 + + - Feature: added Signal() for invoking async callbacks without sending data from AsyncProgressWorker d8adba45f20e077d00561b20199133620c990b38 + - Bugfix: Don't use deprecated v8::Template::Set() 00dacf0a4b86027415867fa7f1059acc499dcece + +### 2.2.1 Mar 29 2016 + + - Bugfix: Use NewFromUnsigned in ReturnValue::Set(uint32_t i) for pre_12 3a18f9bdce29826e0e4c217854bc476918241a58 + - Performance: Remove unneeeded nullptr checks b715ef44887931c94f0d1605b3b1a4156eebece9 + +### 2.2.0 Jan 9 2016 + + - Feature: Add Function::Call wrapper 4c157474dacf284d125c324177b45aa5dabc08c6 + - Feature: Rename GC*logueCallback to GCCallback for > 4.0 3603435109f981606d300eb88004ca101283acec + - Bugfix: Fix Global::Pass for old versions 367e82a60fbaa52716232cc89db1cc3f685d77d9 + - Bugfix: Remove weird MaybeLocal wrapping of what already is a MaybeLocal 23b4590db10c2ba66aee2338aebe9751c4cb190b + +### 2.1.0 Oct 8 2015 + + - Deprecation: Deprecate NanErrnoException in favor of ErrnoException 0af1ca4cf8b3f0f65ed31bc63a663ab3319da55c + - Feature: added helper class 
for accessing contents of typedarrays 17b51294c801e534479d5463697a73462d0ca555 + - Feature: [Maybe types] Add MakeMaybe(...) 48d7b53d9702b0c7a060e69ea10fea8fb48d814d + - Feature: new: allow utf16 string with length 66ac6e65c8ab9394ef588adfc59131b3b9d8347b + - Feature: Introduce SetCallHandler and SetCallAsFunctionHandler 7764a9a115d60ba10dc24d86feb0fbc9b4f75537 + - Bugfix: Enable creating Locals from Globals under Node 0.10. 9bf9b8b190821af889790fdc18ace57257e4f9ff + - Bugfix: Fix issue #462 where PropertyCallbackInfo data is not stored safely. 55f50adedd543098526c7b9f4fffd607d3f9861f + +### 2.0.9 Sep 8 2015 + + - Bugfix: EscapableHandleScope in Nan::NewBuffer for Node 0.8 and 0.10 b1654d7 + +### 2.0.8 Aug 28 2015 + + - Work around duplicate linking bug in clang 11902da + +### 2.0.7 Aug 26 2015 + + - Build: Repackage + +### 2.0.6 Aug 26 2015 + + - Bugfix: Properly handle null callback in FunctionTemplate factory 6e99cb1 + - Bugfix: Remove unused static std::map instances 525bddc + - Bugfix: Make better use of maybe versions of APIs bfba85b + - Bugfix: Fix shadowing issues with handle in ObjectWrap 0a9072d + +### 2.0.5 Aug 10 2015 + + - Bugfix: Reimplement weak callback in ObjectWrap 98d38c1 + - Bugfix: Make sure callback classes are not assignable, copyable or movable 81f9b1d + +### 2.0.4 Aug 6 2015 + + - Build: Repackage + +### 2.0.3 Aug 6 2015 + + - Bugfix: Don't use clang++ / g++ syntax extension. 231450e + +### 2.0.2 Aug 6 2015 + + - Build: Repackage + +### 2.0.1 Aug 6 2015 + + - Bugfix: Add workaround for missing REPLACE_INVALID_UTF8 60d6687 + - Bugfix: Reimplement ObjectWrap from scratch to prevent memory leaks 6484601 + - Bugfix: Fix Persistent leak in FunctionCallbackInfo and PropertyCallbackInfo 641ef5f + - Bugfix: Add missing overload for Nan::NewInstance that takes argc/argv 29450ed + +### 2.0.0 Jul 31 2015 + + - Change: Renamed identifiers with leading underscores b5932b4 + - Change: Replaced NanObjectWrapHandle with class NanObjectWrap 464f1e1 + - Change: Replace NanScope and NanEscpableScope macros with classes 47751c4 + - Change: Rename NanNewBufferHandle to NanNewBuffer 6745f99 + - Change: Rename NanBufferUse to NanNewBuffer 3e8b0a5 + - Change: Rename NanNewBuffer to NanCopyBuffer d6af78d + - Change: Remove Nan prefix from all names 72d1f67 + - Change: Update Buffer API for new upstream changes d5d3291 + - Change: Rename Scope and EscapableScope to HandleScope and EscapableHandleScope 21a7a6a + - Change: Get rid of Handles e6c0daf + - Feature: Support io.js 3 with V8 4.4 + - Feature: Introduce NanPersistent 7fed696 + - Feature: Introduce NanGlobal 4408da1 + - Feature: Added NanTryCatch 10f1ca4 + - Feature: Update for V8 v4.3 4b6404a + - Feature: Introduce NanNewOneByteString c543d32 + - Feature: Introduce namespace Nan 67ed1b1 + - Removal: Remove NanLocker and NanUnlocker dd6e401 + - Removal: Remove string converters, except NanUtf8String, which now follows the node implementation b5d00a9 + - Removal: Remove NanReturn* macros d90a25c + - Removal: Remove HasInstance e8f84fe + + +### 1.9.0 Jul 31 2015 + + - Feature: Added `NanFatalException` 81d4a2c + - Feature: Added more error types 4265f06 + - Feature: Added dereference and function call operators to NanCallback c4b2ed0 + - Feature: Added indexed GetFromPersistent and SaveToPersistent edd510c + - Feature: Added more overloads of SaveToPersistent and GetFromPersistent 8b1cef6 + - Feature: Added NanErrnoException dd87d9e + - Correctness: Prevent assign, copy, and move for classes that do not support it 1f55c59, 4b808cb, 
c96d9b2, fba4a29, 3357130 + - Deprecation: Deprecate `NanGetPointerSafe` and `NanSetPointerSafe` 81d4a2c + - Deprecation: Deprecate `NanBooleanOptionValue` and `NanUInt32OptionValue` 0ad254b + +### 1.8.4 Apr 26 2015 + + - Build: Repackage + +### 1.8.3 Apr 26 2015 + + - Bugfix: Include missing header 1af8648 + +### 1.8.2 Apr 23 2015 + + - Build: Repackage + +### 1.8.1 Apr 23 2015 + + - Bugfix: NanObjectWrapHandle should take a pointer 155f1d3 + +### 1.8.0 Apr 23 2015 + + - Feature: Allow primitives with NanReturnValue 2e4475e + - Feature: Added comparison operators to NanCallback 55b075e + - Feature: Backport thread local storage 15bb7fa + - Removal: Remove support for signatures with arguments 8a2069d + - Correcteness: Replaced NanObjectWrapHandle macro with function 0bc6d59 + +### 1.7.0 Feb 28 2015 + + - Feature: Made NanCallback::Call accept optional target 8d54da7 + - Feature: Support atom-shell 0.21 0b7f1bb + +### 1.6.2 Feb 6 2015 + + - Bugfix: NanEncode: fix argument type for node::Encode on io.js 2be8639 + +### 1.6.1 Jan 23 2015 + + - Build: version bump + +### 1.5.3 Jan 23 2015 + + - Build: repackage + +### 1.6.0 Jan 23 2015 + + - Deprecated `NanNewContextHandle` in favor of `NanNew` 49259af + - Support utility functions moved in newer v8 versions (Node 0.11.15, io.js 1.0) a0aa179 + - Added `NanEncode`, `NanDecodeBytes` and `NanDecodeWrite` 75e6fb9 + +### 1.5.2 Jan 23 2015 + + - Bugfix: Fix non-inline definition build error with clang++ 21d96a1, 60fadd4 + - Bugfix: Readded missing String constructors 18d828f + - Bugfix: Add overload handling NanNew(..) 5ef813b + - Bugfix: Fix uv_work_cb versioning 997e4ae + - Bugfix: Add function factory and test 4eca89c + - Bugfix: Add object template factory and test cdcb951 + - Correctness: Lifted an io.js related typedef c9490be + - Correctness: Make explicit downcasts of String lengths 00074e6 + - Windows: Limit the scope of disabled warning C4530 83d7deb + +### 1.5.1 Jan 15 2015 + + - Build: version bump + +### 1.4.3 Jan 15 2015 + + - Build: version bump + +### 1.4.2 Jan 15 2015 + + - Feature: Support io.js 0dbc5e8 + +### 1.5.0 Jan 14 2015 + + - Feature: Support io.js b003843 + - Correctness: Improved NanNew internals 9cd4f6a + - Feature: Implement progress to NanAsyncWorker 8d6a160 + +### 1.4.1 Nov 8 2014 + + - Bugfix: Handle DEBUG definition correctly + - Bugfix: Accept int as Boolean + +### 1.4.0 Nov 1 2014 + + - Feature: Added NAN_GC_CALLBACK 6a5c245 + - Performance: Removed unnecessary local handle creation 18a7243, 41fe2f8 + - Correctness: Added constness to references in NanHasInstance 02c61cd + - Warnings: Fixed spurious warnings from -Wundef and -Wshadow, 541b122, 99d8cb6 + - Windoze: Shut Visual Studio up when compiling 8d558c1 + - License: Switch to plain MIT from custom hacked MIT license 11de983 + - Build: Added test target to Makefile e232e46 + - Performance: Removed superfluous scope in NanAsyncWorker f4b7821 + - Sugar/Feature: Added NanReturnThis() and NanReturnHolder() shorthands 237a5ff, d697208 + - Feature: Added suitable overload of NanNew for v8::Integer::NewFromUnsigned b27b450 + +### 1.3.0 Aug 2 2014 + + - Added NanNew(std::string) + - Added NanNew(std::string&) + - Added NanAsciiString helper class + - Added NanUtf8String helper class + - Added NanUcs2String helper class + - Deprecated NanRawString() + - Deprecated NanCString() + - Added NanGetIsolateData(v8::Isolate *isolate) + - Added NanMakeCallback(v8::Handle target, v8::Handle func, int argc, v8::Handle* argv) + - Added NanMakeCallback(v8::Handle target, v8::Handle 
symbol, int argc, v8::Handle* argv) + - Added NanMakeCallback(v8::Handle target, const char* method, int argc, v8::Handle* argv) + - Added NanSetTemplate(v8::Handle templ, v8::Handle name , v8::Handle value, v8::PropertyAttribute attributes) + - Added NanSetPrototypeTemplate(v8::Local templ, v8::Handle name, v8::Handle value, v8::PropertyAttribute attributes) + - Added NanSetInstanceTemplate(v8::Local templ, const char *name, v8::Handle value) + - Added NanSetInstanceTemplate(v8::Local templ, v8::Handle name, v8::Handle value, v8::PropertyAttribute attributes) + +### 1.2.0 Jun 5 2014 + + - Add NanSetPrototypeTemplate + - Changed NAN_WEAK_CALLBACK internals, switched _NanWeakCallbackData to class, + introduced _NanWeakCallbackDispatcher + - Removed -Wno-unused-local-typedefs from test builds + - Made test builds Windows compatible ('Sleep()') + +### 1.1.2 May 28 2014 + + - Release to fix more stuff-ups in 1.1.1 + +### 1.1.1 May 28 2014 + + - Release to fix version mismatch in nan.h and lack of changelog entry for 1.1.0 + +### 1.1.0 May 25 2014 + + - Remove nan_isolate, use v8::Isolate::GetCurrent() internally instead + - Additional explicit overloads for NanNew(): (char*,int), (uint8_t*[,int]), + (uint16_t*[,int), double, int, unsigned int, bool, v8::String::ExternalStringResource*, + v8::String::ExternalAsciiStringResource* + - Deprecate NanSymbol() + - Added SetErrorMessage() and ErrorMessage() to NanAsyncWorker + +### 1.0.0 May 4 2014 + + - Heavy API changes for V8 3.25 / Node 0.11.13 + - Use cpplint.py + - Removed NanInitPersistent + - Removed NanPersistentToLocal + - Removed NanFromV8String + - Removed NanMakeWeak + - Removed NanNewLocal + - Removed NAN_WEAK_CALLBACK_OBJECT + - Removed NAN_WEAK_CALLBACK_DATA + - Introduce NanNew, replaces NanNewLocal, NanPersistentToLocal, adds many overloaded typed versions + - Introduce NanUndefined, NanNull, NanTrue and NanFalse + - Introduce NanEscapableScope and NanEscapeScope + - Introduce NanMakeWeakPersistent (requires a special callback to work on both old and new node) + - Introduce NanMakeCallback for node::MakeCallback + - Introduce NanSetTemplate + - Introduce NanGetCurrentContext + - Introduce NanCompileScript and NanRunScript + - Introduce NanAdjustExternalMemory + - Introduce NanAddGCEpilogueCallback, NanAddGCPrologueCallback, NanRemoveGCEpilogueCallback, NanRemoveGCPrologueCallback + - Introduce NanGetHeapStatistics + - Rename NanAsyncWorker#SavePersistent() to SaveToPersistent() + +### 0.8.0 Jan 9 2014 + + - NanDispose -> NanDisposePersistent, deprecate NanDispose + - Extract _NAN_*_RETURN_TYPE, pull up NAN_*() + +### 0.7.1 Jan 9 2014 + + - Fixes to work against debug builds of Node + - Safer NanPersistentToLocal (avoid reinterpret_cast) + - Speed up common NanRawString case by only extracting flattened string when necessary + +### 0.7.0 Dec 17 2013 + + - New no-arg form of NanCallback() constructor. + - NanCallback#Call takes Handle rather than Local + - Removed deprecated NanCallback#Run method, use NanCallback#Call instead + - Split off _NAN_*_ARGS_TYPE from _NAN_*_ARGS + - Restore (unofficial) Node 0.6 compatibility at NanCallback#Call() + - Introduce NanRawString() for char* (or appropriate void*) from v8::String + (replacement for NanFromV8String) + - Introduce NanCString() for null-terminated char* from v8::String + +### 0.6.0 Nov 21 2013 + + - Introduce NanNewLocal(v8::Handle value) for use in place of + v8::Local::New(...) 
since v8 started requiring isolate in Node 0.11.9 + +### 0.5.2 Nov 16 2013 + + - Convert SavePersistent and GetFromPersistent in NanAsyncWorker from protected and public + +### 0.5.1 Nov 12 2013 + + - Use node::MakeCallback() instead of direct v8::Function::Call() + +### 0.5.0 Nov 11 2013 + + - Added @TooTallNate as collaborator + - New, much simpler, "include_dirs" for binding.gyp + - Added full range of NAN_INDEX_* macros to match NAN_PROPERTY_* macros + +### 0.4.4 Nov 2 2013 + + - Isolate argument from v8::Persistent::MakeWeak removed for 0.11.8+ + +### 0.4.3 Nov 2 2013 + + - Include node_object_wrap.h, removed from node.h for Node 0.11.8. + +### 0.4.2 Nov 2 2013 + + - Handle deprecation of v8::Persistent::Dispose(v8::Isolate* isolate)) for + Node 0.11.8 release. + +### 0.4.1 Sep 16 2013 + + - Added explicit `#include ` as it was removed from node.h for v0.11.8 + +### 0.4.0 Sep 2 2013 + + - Added NAN_INLINE and NAN_DEPRECATED and made use of them + - Added NanError, NanTypeError and NanRangeError + - Cleaned up code + +### 0.3.2 Aug 30 2013 + + - Fix missing scope declaration in GetFromPersistent() and SaveToPersistent + in NanAsyncWorker + +### 0.3.1 Aug 20 2013 + + - fix "not all control paths return a value" compile warning on some platforms + +### 0.3.0 Aug 19 2013 + + - Made NAN work with NPM + - Lots of fixes to NanFromV8String, pulling in features from new Node core + - Changed node::encoding to Nan::Encoding in NanFromV8String to unify the API + - Added optional error number argument for NanThrowError() + - Added NanInitPersistent() + - Added NanReturnNull() and NanReturnEmptyString() + - Added NanLocker and NanUnlocker + - Added missing scopes + - Made sure to clear disposed Persistent handles + - Changed NanAsyncWorker to allocate error messages on the heap + - Changed NanThrowError(Local) to NanThrowError(Handle) + - Fixed leak in NanAsyncWorker when errmsg is used + +### 0.2.2 Aug 5 2013 + + - Fixed usage of undefined variable with node::BASE64 in NanFromV8String() + +### 0.2.1 Aug 5 2013 + + - Fixed 0.8 breakage, node::BUFFER encoding type not available in 0.8 for + NanFromV8String() + +### 0.2.0 Aug 5 2013 + + - Added NAN_PROPERTY_GETTER, NAN_PROPERTY_SETTER, NAN_PROPERTY_ENUMERATOR, + NAN_PROPERTY_DELETER, NAN_PROPERTY_QUERY + - Extracted _NAN_METHOD_ARGS, _NAN_GETTER_ARGS, _NAN_SETTER_ARGS, + _NAN_PROPERTY_GETTER_ARGS, _NAN_PROPERTY_SETTER_ARGS, + _NAN_PROPERTY_ENUMERATOR_ARGS, _NAN_PROPERTY_DELETER_ARGS, + _NAN_PROPERTY_QUERY_ARGS + - Added NanGetInternalFieldPointer, NanSetInternalFieldPointer + - Added NAN_WEAK_CALLBACK, NAN_WEAK_CALLBACK_OBJECT, + NAN_WEAK_CALLBACK_DATA, NanMakeWeak + - Renamed THROW_ERROR to _NAN_THROW_ERROR + - Added NanNewBufferHandle(char*, size_t, node::smalloc::FreeCallback, void*) + - Added NanBufferUse(char*, uint32_t) + - Added NanNewContextHandle(v8::ExtensionConfiguration*, + v8::Handle, v8::Handle) + - Fixed broken NanCallback#GetFunction() + - Added optional encoding and size arguments to NanFromV8String() + - Added NanGetPointerSafe() and NanSetPointerSafe() + - Added initial test suite (to be expanded) + - Allow NanUInt32OptionValue to convert any Number object + +### 0.1.0 Jul 21 2013 + + - Added `NAN_GETTER`, `NAN_SETTER` + - Added `NanThrowError` with single Local argument + - Added `NanNewBufferHandle` with single uint32_t argument + - Added `NanHasInstance(Persistent&, Handle)` + - Added `Local NanCallback#GetFunction()` + - Added `NanCallback#Call(int, Local[])` + - Deprecated `NanCallback#Run(int, Local[])` in favour of Call 
diff --git a/node_modules/nan/LICENSE.md b/node_modules/nan/LICENSE.md new file mode 100644 index 00000000..dddd13d5 --- /dev/null +++ b/node_modules/nan/LICENSE.md @@ -0,0 +1,13 @@ +The MIT License (MIT) +===================== + +Copyright (c) 2018 NAN contributors +----------------------------------- + +*NAN contributors listed at * + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/nan/README.md b/node_modules/nan/README.md new file mode 100644 index 00000000..3389ef52 --- /dev/null +++ b/node_modules/nan/README.md @@ -0,0 +1,456 @@ +Native Abstractions for Node.js +=============================== + +**A header file filled with macro and utility goodness for making add-on development for Node.js easier across versions 0.8, 0.10, 0.12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 and 12.** + +***Current version: 2.14.0*** + +*(See [CHANGELOG.md](https://github.com/nodejs/nan/blob/master/CHANGELOG.md) for complete ChangeLog)* + +[![NPM](https://nodei.co/npm/nan.png?downloads=true&downloadRank=true)](https://nodei.co/npm/nan/) [![NPM](https://nodei.co/npm-dl/nan.png?months=6&height=3)](https://nodei.co/npm/nan/) + +[![Build Status](https://api.travis-ci.org/nodejs/nan.svg?branch=master)](https://travis-ci.org/nodejs/nan) +[![Build status](https://ci.appveyor.com/api/projects/status/kh73pbm9dsju7fgh)](https://ci.appveyor.com/project/RodVagg/nan) + +Thanks to the crazy changes in V8 (and some in Node core), keeping native addons compiling happily across versions, particularly 0.10 to 0.12 to 4.0, is a minor nightmare. The goal of this project is to store all logic necessary to develop native Node.js addons without having to inspect `NODE_MODULE_VERSION` and get yourself into a macro-tangle. + +This project also contains some helper utilities that make addon development a bit more pleasant. + + * **[News & Updates](#news)** + * **[Usage](#usage)** + * **[Example](#example)** + * **[API](#api)** + * **[Tests](#tests)** + * **[Known issues](#issues)** + * **[Governance & Contributing](#governance)** + + + +## News & Updates + + + +## Usage + +Simply add **NAN** as a dependency in the *package.json* of your Node addon: + +``` bash +$ npm install --save nan +``` + +Pull in the path to **NAN** in your *binding.gyp* so that you can use `#include ` in your *.cpp* files: + +``` python +"include_dirs" : [ + "` when compiling your addon. + + + +## Example + +Just getting started with Nan? Take a look at the **[Node Add-on Examples](https://github.com/nodejs/node-addon-examples)**. 
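+
+For orientation, here is a minimal sketch of what a NAN-based addon can look like. It only uses NAN APIs documented below; the file name *hello.cpp* and the `Hello`/`Init` symbol names are illustrative, not anything NAN prescribes.
+
+```c++
+// hello.cpp -- a minimal NAN addon (illustrative names throughout)
+#include <nan.h>
+
+// A JavaScript-accessible method; see the Methods documentation below.
+NAN_METHOD(Hello) {
+  info.GetReturnValue().Set(Nan::New("world").ToLocalChecked());
+}
+
+// Module initialisation: attach `hello` to the addon's exports object.
+NAN_MODULE_INIT(Init) {
+  Nan::Set(target, Nan::New("hello").ToLocalChecked(),
+      Nan::GetFunction(Nan::New<v8::FunctionTemplate>(Hello)).ToLocalChecked());
+}
+
+NODE_MODULE(hello, Init)
+```
+
+Once built (for example with `node-gyp`), `require()`-ing the compiled addon exposes a `hello()` function that returns `"world"`.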
+ +Refer to a [quick-start **Nan** Boilerplate](https://github.com/fcanas/node-native-boilerplate) for a ready-to-go project that utilizes basic Nan functionality. + +For a simpler example, see the **[async pi estimation example](https://github.com/nodejs/nan/tree/master/examples/async_pi_estimate)** in the examples directory for full code and an explanation of what this Monte Carlo Pi estimation example does. Below are just some parts of the full example that illustrate the use of **NAN**. + +Yet another example is **[nan-example-eol](https://github.com/CodeCharmLtd/nan-example-eol)**. It shows newline detection implemented as a native addon. + +Also take a look at our comprehensive **[C++ test suite](https://github.com/nodejs/nan/tree/master/test/cpp)** which has a plethora of code snippets for your pasting pleasure. + + + +## API + +Additional to the NAN documentation below, please consult: + +* [The V8 Getting Started * Guide](https://github.com/v8/v8/wiki/Getting%20Started%20with%20Embedding) +* [The V8 Embedders * Guide](https://github.com/v8/v8/wiki/Embedder%27s%20Guide) +* [V8 API Documentation](https://v8docs.nodesource.com/) +* [Node Add-on Documentation](https://nodejs.org/api/addons.html) + + + +### JavaScript-accessible methods + +A _template_ is a blueprint for JavaScript functions and objects in a context. You can use a template to wrap C++ functions and data structures within JavaScript objects so that they can be manipulated from JavaScript. See the V8 Embedders Guide section on [Templates](https://github.com/v8/v8/wiki/Embedder%27s-Guide#templates) for further information. + +In order to expose functionality to JavaScript via a template, you must provide it to V8 in a form that it understands. Across the versions of V8 supported by NAN, JavaScript-accessible method signatures vary widely, NAN fully abstracts method declaration and provides you with an interface that is similar to the most recent V8 API but is backward-compatible with older versions that still use the now-deceased `v8::Argument` type. + +* **Method argument types** + - Nan::FunctionCallbackInfo + - Nan::PropertyCallbackInfo + - Nan::ReturnValue +* **Method declarations** + - Method declaration + - Getter declaration + - Setter declaration + - Property getter declaration + - Property setter declaration + - Property enumerator declaration + - Property deleter declaration + - Property query declaration + - Index getter declaration + - Index setter declaration + - Index enumerator declaration + - Index deleter declaration + - Index query declaration +* Method and template helpers + - Nan::SetMethod() + - Nan::SetPrototypeMethod() + - Nan::SetAccessor() + - Nan::SetNamedPropertyHandler() + - Nan::SetIndexedPropertyHandler() + - Nan::SetTemplate() + - Nan::SetPrototypeTemplate() + - Nan::SetInstanceTemplate() + - Nan::SetCallHandler() + - Nan::SetCallAsFunctionHandler() + +### Scopes + +A _local handle_ is a pointer to an object. All V8 objects are accessed using handles, they are necessary because of the way the V8 garbage collector works. + +A handle scope can be thought of as a container for any number of handles. When you've finished with your handles, instead of deleting each one individually you can simply delete their scope. + +The creation of `HandleScope` objects is different across the supported versions of V8. Therefore, NAN provides its own implementations that can be used safely across these. 
+ + - Nan::HandleScope + - Nan::EscapableHandleScope + +Also see the V8 Embedders Guide section on [Handles and Garbage Collection](https://github.com/v8/v8/wiki/Embedder%27s%20Guide#handles-and-garbage-collection). + +### Persistent references + +An object reference that is independent of any `HandleScope` is a _persistent_ reference. Where a `Local` handle only lives as long as the `HandleScope` in which it was allocated, a `Persistent` handle remains valid until it is explicitly disposed. + +Due to the evolution of the V8 API, it is necessary for NAN to provide a wrapper implementation of the `Persistent` classes to supply compatibility across the V8 versions supported. + + - Nan::PersistentBase & v8::PersistentBase + - Nan::NonCopyablePersistentTraits & v8::NonCopyablePersistentTraits + - Nan::CopyablePersistentTraits & v8::CopyablePersistentTraits + - Nan::Persistent + - Nan::Global + - Nan::WeakCallbackInfo + - Nan::WeakCallbackType + +Also see the V8 Embedders Guide section on [Handles and Garbage Collection](https://developers.google.com/v8/embed#handles). + +### New + +NAN provides a `Nan::New()` helper for the creation of new JavaScript objects in a way that's compatible across the supported versions of V8. + + - Nan::New() + - Nan::Undefined() + - Nan::Null() + - Nan::True() + - Nan::False() + - Nan::EmptyString() + + +### Converters + +NAN contains functions that convert `v8::Value`s to other `v8::Value` types and native types. Since type conversion is not guaranteed to succeed, they return `Nan::Maybe` types. These converters can be used in place of `value->ToX()` and `value->XValue()` (where `X` is one of the types, e.g. `Boolean`) in a way that provides a consistent interface across V8 versions. Newer versions of V8 use the new `v8::Maybe` and `v8::MaybeLocal` types for these conversions, older versions don't have this functionality so it is provided by NAN. + + - Nan::To() + +### Maybe Types + +The `Nan::MaybeLocal` and `Nan::Maybe` types are monads that encapsulate `v8::Local` handles that _may be empty_. + +* **Maybe Types** + - Nan::MaybeLocal + - Nan::Maybe + - Nan::Nothing + - Nan::Just +* **Maybe Helpers** + - Nan::Call() + - Nan::ToDetailString() + - Nan::ToArrayIndex() + - Nan::Equals() + - Nan::NewInstance() + - Nan::GetFunction() + - Nan::Set() + - Nan::DefineOwnProperty() + - Nan::ForceSet() + - Nan::Get() + - Nan::GetPropertyAttributes() + - Nan::Has() + - Nan::Delete() + - Nan::GetPropertyNames() + - Nan::GetOwnPropertyNames() + - Nan::SetPrototype() + - Nan::ObjectProtoToString() + - Nan::HasOwnProperty() + - Nan::HasRealNamedProperty() + - Nan::HasRealIndexedProperty() + - Nan::HasRealNamedCallbackProperty() + - Nan::GetRealNamedPropertyInPrototypeChain() + - Nan::GetRealNamedProperty() + - Nan::CallAsFunction() + - Nan::CallAsConstructor() + - Nan::GetSourceLine() + - Nan::GetLineNumber() + - Nan::GetStartColumn() + - Nan::GetEndColumn() + - Nan::CloneElementAt() + - Nan::HasPrivate() + - Nan::GetPrivate() + - Nan::SetPrivate() + - Nan::DeletePrivate() + - Nan::MakeMaybe() + +### Script + +NAN provides a `v8::Script` helpers as the API has changed over the supported versions of V8. + + - Nan::CompileScript() + - Nan::RunScript() + + +### JSON + +The _JSON_ object provides the c++ versions of the methods offered by the `JSON` object in javascript. V8 exposes these methods via the `v8::JSON` object. 
+ + - Nan::JSON.Parse + - Nan::JSON.Stringify + +Refer to the V8 JSON object in the [V8 documentation](https://v8docs.nodesource.com/node-8.11/da/d6f/classv8_1_1_j_s_o_n.html) for more information about these methods and their arguments. + +### Errors + +NAN includes helpers for creating, throwing and catching Errors as much of this functionality varies across the supported versions of V8 and must be abstracted. + +Note that an Error object is simply a specialized form of `v8::Value`. + +Also consult the V8 Embedders Guide section on [Exceptions](https://developers.google.com/v8/embed#exceptions) for more information. + + - Nan::Error() + - Nan::RangeError() + - Nan::ReferenceError() + - Nan::SyntaxError() + - Nan::TypeError() + - Nan::ThrowError() + - Nan::ThrowRangeError() + - Nan::ThrowReferenceError() + - Nan::ThrowSyntaxError() + - Nan::ThrowTypeError() + - Nan::FatalException() + - Nan::ErrnoException() + - Nan::TryCatch + + +### Buffers + +NAN's `node::Buffer` helpers exist as the API has changed across supported Node versions. Use these methods to ensure compatibility. + + - Nan::NewBuffer() + - Nan::CopyBuffer() + - Nan::FreeCallback() + +### Nan::Callback + +`Nan::Callback` makes it easier to use `v8::Function` handles as callbacks. A class that wraps a `v8::Function` handle, protecting it from garbage collection and making it particularly useful for storage and use across asynchronous execution. + + - Nan::Callback + +### Asynchronous work helpers + +`Nan::AsyncWorker`, `Nan::AsyncProgressWorker` and `Nan::AsyncProgressQueueWorker` are helper classes that make working with asynchronous code easier. + + - Nan::AsyncWorker + - Nan::AsyncProgressWorkerBase & Nan::AsyncProgressWorker + - Nan::AsyncProgressQueueWorker + - Nan::AsyncQueueWorker + +### Strings & Bytes + +Miscellaneous string & byte encoding and decoding functionality provided for compatibility across supported versions of V8 and Node. Implemented by NAN to ensure that all encoding types are supported, even for older versions of Node where they are missing. + + - Nan::Encoding + - Nan::Encode() + - Nan::DecodeBytes() + - Nan::DecodeWrite() + + +### Object Wrappers + +The `ObjectWrap` class can be used to make wrapped C++ objects and a factory of wrapped objects. + + - Nan::ObjectWrap + + +### V8 internals + +The hooks to access V8 internals—including GC and statistics—are different across the supported versions of V8, therefore NAN provides its own hooks that call the appropriate V8 methods. 
+ + - NAN_GC_CALLBACK() + - Nan::AddGCEpilogueCallback() + - Nan::RemoveGCEpilogueCallback() + - Nan::AddGCPrologueCallback() + - Nan::RemoveGCPrologueCallback() + - Nan::GetHeapStatistics() + - Nan::SetCounterFunction() + - Nan::SetCreateHistogramFunction() + - Nan::SetAddHistogramSampleFunction() + - Nan::IdleNotification() + - Nan::LowMemoryNotification() + - Nan::ContextDisposedNotification() + - Nan::GetInternalFieldPointer() + - Nan::SetInternalFieldPointer() + - Nan::AdjustExternalMemory() + + +### Miscellaneous V8 Helpers + + - Nan::Utf8String + - Nan::GetCurrentContext() + - Nan::SetIsolateData() + - Nan::GetIsolateData() + - Nan::TypedArrayContents + + +### Miscellaneous Node Helpers + + - Nan::AsyncResource + - Nan::MakeCallback() + - NAN_MODULE_INIT() + - Nan::Export() + + + + + + +### Tests + +To run the NAN tests do: + +``` sh +npm install +npm run-script rebuild-tests +npm test +``` + +Or just: + +``` sh +npm install +make test +``` + + + +## Known issues + +### Compiling against Node.js 0.12 on OSX + +With new enough compilers available on OSX, the versions of V8 headers corresponding to Node.js 0.12 +do not compile anymore. The error looks something like: + +``` +❯ CXX(target) Release/obj.target/accessors/cpp/accessors.o +In file included from ../cpp/accessors.cpp:9: +In file included from ../../nan.h:51: +In file included from /Users/ofrobots/.node-gyp/0.12.18/include/node/node.h:61: +/Users/ofrobots/.node-gyp/0.12.18/include/node/v8.h:5800:54: error: 'CreateHandle' is a protected member of 'v8::HandleScope' + return Handle(reinterpret_cast(HandleScope::CreateHandle( + ~~~~~~~~~~~~~^~~~~~~~~~~~ +``` + +This can be worked around by patching your local versions of v8.h corresponding to Node 0.12 to make +`v8::Handle` a friend of `v8::HandleScope`. Since neither Node.js not V8 support this release line anymore +this patch cannot be released by either project in an official release. + +For this reason, we do not test against Node.js 0.12 on OSX in this project's CI. If you need to support +that configuration, you will need to either get an older compiler, or apply a source patch to the version +of V8 headers as a workaround. + + + +## Governance & Contributing + +NAN is governed by the [Node.js Addon API Working Group](https://github.com/nodejs/CTC/blob/master/WORKING_GROUPS.md#addon-api) + +### Addon API Working Group (WG) + +The NAN project is jointly governed by a Working Group which is responsible for high-level guidance of the project. + +Members of the WG are also known as Collaborators, there is no distinction between the two, unlike other Node.js projects. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project [README.md](./README.md#collaborators). + +Individuals making significant and valuable contributions are made members of the WG and given commit-access to the project. These individuals are identified by the WG and their addition to the WG is discussed via GitHub and requires unanimous consensus amongst those WG members participating in the discussion with a quorum of 50% of WG members required for acceptance of the vote. + +_Note:_ If you make a significant contribution and are not considered for commit-access log an issue or contact a WG member directly. 
+ +For the current list of WG members / Collaborators, see the project [README.md](./README.md#collaborators). + +### Consensus Seeking Process + +The WG follows a [Consensus Seeking](https://en.wikipedia.org/wiki/Consensus-seeking_decision-making) decision making model. + +Modifications of the contents of the NAN repository are made on a collaborative basis. Anybody with a GitHub account may propose a modification via pull request and it will be considered by the WG. All pull requests must be reviewed and accepted by a WG member with sufficient expertise who is able to take full responsibility for the change. In the case of pull requests proposed by an existing WG member, an additional WG member is required for sign-off. Consensus should be sought if additional WG members participate and there is disagreement around a particular modification. + +If a change proposal cannot reach a consensus, a WG member can call for a vote amongst the members of the WG. Simple majority wins. + + + +## Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + + + +### WG Members / Collaborators + + + + + + + + + + +
+| Name | GitHub | Twitter |
+|------|--------|---------|
+| Rod Vagg | GitHub/rvagg | Twitter/@rvagg |
+| Benjamin Byholm | GitHub/kkoopa | - |
+| Trevor Norris | GitHub/trevnorris | Twitter/@trevnorris |
+| Nathan Rajlich | GitHub/TooTallNate | Twitter/@TooTallNate |
+| Brett Lawson | GitHub/brett19 | Twitter/@brett19x |
+| Ben Noordhuis | GitHub/bnoordhuis | Twitter/@bnoordhuis |
+| David Siegel | GitHub/agnat | Twitter/@agnat |
+| Michael Ira Krufky | GitHub/mkrufky | Twitter/@mkrufky |
+ +## Licence & copyright + +Copyright (c) 2018 NAN WG Members / Collaborators (listed above). + +Native Abstractions for Node.js is licensed under an MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE file for more details. diff --git a/node_modules/nan/doc/asyncworker.md b/node_modules/nan/doc/asyncworker.md new file mode 100644 index 00000000..04231f83 --- /dev/null +++ b/node_modules/nan/doc/asyncworker.md @@ -0,0 +1,146 @@ +## Asynchronous work helpers + +`Nan::AsyncWorker`, `Nan::AsyncProgressWorker` and `Nan::AsyncProgressQueueWorker` are helper classes that make working with asynchronous code easier. + + - Nan::AsyncWorker + - Nan::AsyncProgressWorkerBase & Nan::AsyncProgressWorker + - Nan::AsyncProgressQueueWorker + - Nan::AsyncQueueWorker + + +### Nan::AsyncWorker + +`Nan::AsyncWorker` is an _abstract_ class that you can subclass to have much of the annoying asynchronous queuing and handling taken care of for you. It can even store arbitrary V8 objects for you and have them persist while the asynchronous work is in progress. + +This class internally handles the details of creating an [`AsyncResource`][AsyncResource], and running the callback in the +correct async context. To be able to identify the async resources created by this class in async-hooks, provide a +`resource_name` to the constructor. It is recommended that the module name be used as a prefix to the `resource_name` to avoid +collisions in the names. For more details see [`AsyncResource`][AsyncResource] documentation. The `resource_name` needs to stay valid for the lifetime of the worker instance. + +Definition: + +```c++ +class AsyncWorker { + public: + explicit AsyncWorker(Callback *callback_, const char* resource_name = "nan:AsyncWorker"); + + virtual ~AsyncWorker(); + + virtual void WorkComplete(); + + void SaveToPersistent(const char *key, const v8::Local &value); + + void SaveToPersistent(const v8::Local &key, + const v8::Local &value); + + void SaveToPersistent(uint32_t index, + const v8::Local &value); + + v8::Local GetFromPersistent(const char *key) const; + + v8::Local GetFromPersistent(const v8::Local &key) const; + + v8::Local GetFromPersistent(uint32_t index) const; + + virtual void Execute() = 0; + + uv_work_t request; + + virtual void Destroy(); + + protected: + Persistent persistentHandle; + + Callback *callback; + + virtual void HandleOKCallback(); + + virtual void HandleErrorCallback(); + + void SetErrorMessage(const char *msg); + + const char* ErrorMessage(); +}; +``` + + +### Nan::AsyncProgressWorkerBase & Nan::AsyncProgressWorker + +`Nan::AsyncProgressWorkerBase` is an _abstract_ class template that extends `Nan::AsyncWorker` and adds additional progress reporting callbacks that can be used during the asynchronous work execution to provide progress data back to JavaScript. + +Previously the definition of `Nan::AsyncProgressWorker` only allowed sending `const char` data. Now extending `Nan::AsyncProgressWorker` will yield an instance of the implicit `Nan::AsyncProgressWorkerBase` template with type `` for compatibility. + +`Nan::AsyncProgressWorkerBase` & `Nan::AsyncProgressWorker` is intended for best-effort delivery of nonessential progress messages, e.g. a progress bar. The last event sent before the main thread is woken will be delivered. 
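+
+As an illustration, a subclass of `Nan::AsyncProgressWorker` might look like the following sketch; the `CountWorker` name and its counting workload are invented for the example, and the formal definition follows below.
+
+```c++
+// Invented example: streams progress values 0..99 back to JavaScript.
+class CountWorker : public Nan::AsyncProgressWorker {
+ public:
+  explicit CountWorker(Nan::Callback *callback)
+    : Nan::AsyncProgressWorker(callback, "example:CountWorker") {}
+
+  // Runs on the worker thread; no V8 calls are allowed here.
+  void Execute(const ExecutionProgress& progress) {
+    for (char i = 0; i < 100; i++) {
+      progress.Send(&i, 1);  // best-effort delivery, as described above
+    }
+  }
+
+  // Runs on the main thread with access to V8.
+  void HandleProgressCallback(const char *data, size_t count) {
+    Nan::HandleScope scope;
+    v8::Local<v8::Value> argv[] = { Nan::New(static_cast<int>(*data)) };
+    callback->Call(1, argv);  // the AsyncResource overloads are preferred, see Nan::Callback
+  }
+};
+
+// Queued like any other worker:
+//   Nan::AsyncQueueWorker(new CountWorker(new Nan::Callback(fn)));
+```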
+ +Definition: + +```c++ +template +class AsyncProgressWorkerBase : public AsyncWorker { + public: + explicit AsyncProgressWorkerBase(Callback *callback_, const char* resource_name = ...); + + virtual ~AsyncProgressWorkerBase(); + + void WorkProgress(); + + class ExecutionProgress { + public: + void Signal() const; + void Send(const T* data, size_t count) const; + }; + + virtual void Execute(const ExecutionProgress& progress) = 0; + + virtual void HandleProgressCallback(const T *data, size_t count) = 0; + + virtual void Destroy(); +}; + +typedef AsyncProgressWorkerBase AsyncProgressWorker; +``` + + +### Nan::AsyncProgressQueueWorker + +`Nan::AsyncProgressQueueWorker` is an _abstract_ class template that extends `Nan::AsyncWorker` and adds additional progress reporting callbacks that can be used during the asynchronous work execution to provide progress data back to JavaScript. + +`Nan::AsyncProgressQueueWorker` behaves exactly the same as `Nan::AsyncProgressWorker`, except all events are queued and delivered to the main thread. + +Definition: + +```c++ +template +class AsyncProgressQueueWorker : public AsyncWorker { + public: + explicit AsyncProgressQueueWorker(Callback *callback_, const char* resource_name = "nan:AsyncProgressQueueWorker"); + + virtual ~AsyncProgressQueueWorker(); + + void WorkProgress(); + + class ExecutionProgress { + public: + void Send(const T* data, size_t count) const; + }; + + virtual void Execute(const ExecutionProgress& progress) = 0; + + virtual void HandleProgressCallback(const T *data, size_t count) = 0; + + virtual void Destroy(); +}; +``` + + +### Nan::AsyncQueueWorker + +`Nan::AsyncQueueWorker` will run a `Nan::AsyncWorker` asynchronously via libuv. Both the `execute` and `after_work` steps are taken care of for you. Most of the logic for this is embedded in `Nan::AsyncWorker`. + +Definition: + +```c++ +void AsyncQueueWorker(AsyncWorker *); +``` + +[AsyncResource]: node_misc.md#api_nan_asyncresource diff --git a/node_modules/nan/doc/buffers.md b/node_modules/nan/doc/buffers.md new file mode 100644 index 00000000..8d8d25cf --- /dev/null +++ b/node_modules/nan/doc/buffers.md @@ -0,0 +1,54 @@ +## Buffers + +NAN's `node::Buffer` helpers exist as the API has changed across supported Node versions. Use these methods to ensure compatibility. + + - Nan::NewBuffer() + - Nan::CopyBuffer() + - Nan::FreeCallback() + + +### Nan::NewBuffer() + +Allocate a new `node::Buffer` object with the specified size and optional data. Calls `node::Buffer::New()`. + +Note that when creating a `Buffer` using `Nan::NewBuffer()` and an existing `char*`, it is assumed that the ownership of the pointer is being transferred to the new `Buffer` for management. +When a `node::Buffer` instance is garbage collected and a `FreeCallback` has not been specified, `data` will be disposed of via a call to `free()`. +You _must not_ free the memory space manually once you have created a `Buffer` in this way. + +Signature: + +```c++ +Nan::MaybeLocal Nan::NewBuffer(uint32_t size) +Nan::MaybeLocal Nan::NewBuffer(char* data, uint32_t size) +Nan::MaybeLocal Nan::NewBuffer(char *data, + size_t length, + Nan::FreeCallback callback, + void *hint) +``` + + + +### Nan::CopyBuffer() + +Similar to [`Nan::NewBuffer()`](#api_nan_new_buffer) except that an implicit memcpy will occur within Node. Calls `node::Buffer::Copy()`. + +Management of the `char*` is left to the user, you should manually free the memory space if necessary as the new `Buffer` will have its own copy. 
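+
+For example (a minimal sketch; `MakeGreeting` is an invented method name, and the formal signature follows below):
+
+```c++
+// Copy a C-side string into a new Buffer and hand the copy to JavaScript.
+NAN_METHOD(MakeGreeting) {
+  const char data[] = "hello";  // remains owned by this code; Node keeps its own copy
+  info.GetReturnValue().Set(
+      Nan::CopyBuffer(data, sizeof(data) - 1).ToLocalChecked());
+}
+```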
+ +Signature: + +```c++ +Nan::MaybeLocal Nan::CopyBuffer(const char *data, uint32_t size) +``` + + + +### Nan::FreeCallback() + +A free callback that can be provided to [`Nan::NewBuffer()`](#api_nan_new_buffer). +The supplied callback will be invoked when the `Buffer` undergoes garbage collection. + +Signature: + +```c++ +typedef void (*FreeCallback)(char *data, void *hint); +``` diff --git a/node_modules/nan/doc/callback.md b/node_modules/nan/doc/callback.md new file mode 100644 index 00000000..f7af0bfd --- /dev/null +++ b/node_modules/nan/doc/callback.md @@ -0,0 +1,76 @@ +## Nan::Callback + +`Nan::Callback` makes it easier to use `v8::Function` handles as callbacks. A class that wraps a `v8::Function` handle, protecting it from garbage collection and making it particularly useful for storage and use across asynchronous execution. + + - Nan::Callback + + +### Nan::Callback + +```c++ +class Callback { + public: + Callback(); + + explicit Callback(const v8::Local &fn); + + ~Callback(); + + bool operator==(const Callback &other) const; + + bool operator!=(const Callback &other) const; + + v8::Local operator*() const; + + MaybeLocal operator()(AsyncResource* async_resource, + v8::Local target, + int argc = 0, + v8::Local argv[] = 0) const; + + MaybeLocal operator()(AsyncResource* async_resource, + int argc = 0, + v8::Local argv[] = 0) const; + + void SetFunction(const v8::Local &fn); + + v8::Local GetFunction() const; + + bool IsEmpty() const; + + void Reset(const v8::Local &fn); + + void Reset(); + + MaybeLocal Call(v8::Local target, + int argc, + v8::Local argv[], + AsyncResource* async_resource) const; + MaybeLocal Call(int argc, + v8::Local argv[], + AsyncResource* async_resource) const; + + // Deprecated versions. Use the versions that accept an async_resource instead + // as they run the callback in the correct async context as specified by the + // resource. If you want to call a synchronous JS function (i.e. on a + // non-empty JS stack), you can use Nan::Call instead. + v8::Local operator()(v8::Local target, + int argc = 0, + v8::Local argv[] = 0) const; + + v8::Local operator()(int argc = 0, + v8::Local argv[] = 0) const; + v8::Local Call(v8::Local target, + int argc, + v8::Local argv[]) const; + + v8::Local Call(int argc, v8::Local argv[]) const; +}; +``` + +Example usage: + +```c++ +v8::Local function; +Nan::Callback callback(function); +callback.Call(0, 0); +``` diff --git a/node_modules/nan/doc/converters.md b/node_modules/nan/doc/converters.md new file mode 100644 index 00000000..d20861b5 --- /dev/null +++ b/node_modules/nan/doc/converters.md @@ -0,0 +1,41 @@ +## Converters + +NAN contains functions that convert `v8::Value`s to other `v8::Value` types and native types. Since type conversion is not guaranteed to succeed, they return `Nan::Maybe` types. These converters can be used in place of `value->ToX()` and `value->XValue()` (where `X` is one of the types, e.g. `Boolean`) in a way that provides a consistent interface across V8 versions. Newer versions of V8 use the new `v8::Maybe` and `v8::MaybeLocal` types for these conversions, older versions don't have this functionality so it is provided by NAN. + + - Nan::To() + + +### Nan::To() + +Converts a `v8::Local` to a different subtype of `v8::Value` or to a native data type. Returns a `Nan::MaybeLocal<>` or a `Nan::Maybe<>` accordingly. + +See [maybe_types.md](./maybe_types.md) for more information on `Nan::Maybe` types. 
+ +Signatures: + +```c++ +// V8 types +Nan::MaybeLocal Nan::To(v8::Local val); +Nan::MaybeLocal Nan::To(v8::Local val); +Nan::MaybeLocal Nan::To(v8::Local val); +Nan::MaybeLocal Nan::To(v8::Local val); +Nan::MaybeLocal Nan::To(v8::Local val); +Nan::MaybeLocal Nan::To(v8::Local val); +Nan::MaybeLocal Nan::To(v8::Local val); + +// Native types +Nan::Maybe Nan::To(v8::Local val); +Nan::Maybe Nan::To(v8::Local val); +Nan::Maybe Nan::To(v8::Local val); +Nan::Maybe Nan::To(v8::Local val); +Nan::Maybe Nan::To(v8::Local val); +``` + +### Example + +```c++ +v8::Local val; +Nan::MaybeLocal str = Nan::To(val); +Nan::Maybe d = Nan::To(val); +``` + diff --git a/node_modules/nan/doc/errors.md b/node_modules/nan/doc/errors.md new file mode 100644 index 00000000..8127a548 --- /dev/null +++ b/node_modules/nan/doc/errors.md @@ -0,0 +1,226 @@ +## Errors + +NAN includes helpers for creating, throwing and catching Errors as much of this functionality varies across the supported versions of V8 and must be abstracted. + +Note that an Error object is simply a specialized form of `v8::Value`. + +Also consult the V8 Embedders Guide section on [Exceptions](https://developers.google.com/v8/embed#exceptions) for more information. + + - Nan::Error() + - Nan::RangeError() + - Nan::ReferenceError() + - Nan::SyntaxError() + - Nan::TypeError() + - Nan::ThrowError() + - Nan::ThrowRangeError() + - Nan::ThrowReferenceError() + - Nan::ThrowSyntaxError() + - Nan::ThrowTypeError() + - Nan::FatalException() + - Nan::ErrnoException() + - Nan::TryCatch + + + +### Nan::Error() + +Create a new Error object using the [v8::Exception](https://v8docs.nodesource.com/node-8.11/da/d6a/classv8_1_1_exception.html) class in a way that is compatible across the supported versions of V8. + +Note that an Error object is simply a specialized form of `v8::Value`. + +Signature: + +```c++ +v8::Local Nan::Error(const char *msg); +v8::Local Nan::Error(v8::Local msg); +``` + + + +### Nan::RangeError() + +Create a new RangeError object using the [v8::Exception](https://v8docs.nodesource.com/node-8.11/da/d6a/classv8_1_1_exception.html) class in a way that is compatible across the supported versions of V8. + +Note that an RangeError object is simply a specialized form of `v8::Value`. + +Signature: + +```c++ +v8::Local Nan::RangeError(const char *msg); +v8::Local Nan::RangeError(v8::Local msg); +``` + + + +### Nan::ReferenceError() + +Create a new ReferenceError object using the [v8::Exception](https://v8docs.nodesource.com/node-8.11/da/d6a/classv8_1_1_exception.html) class in a way that is compatible across the supported versions of V8. + +Note that an ReferenceError object is simply a specialized form of `v8::Value`. + +Signature: + +```c++ +v8::Local Nan::ReferenceError(const char *msg); +v8::Local Nan::ReferenceError(v8::Local msg); +``` + + + +### Nan::SyntaxError() + +Create a new SyntaxError object using the [v8::Exception](https://v8docs.nodesource.com/node-8.11/da/d6a/classv8_1_1_exception.html) class in a way that is compatible across the supported versions of V8. + +Note that an SyntaxError object is simply a specialized form of `v8::Value`. + +Signature: + +```c++ +v8::Local Nan::SyntaxError(const char *msg); +v8::Local Nan::SyntaxError(v8::Local msg); +``` + + + +### Nan::TypeError() + +Create a new TypeError object using the [v8::Exception](https://v8docs.nodesource.com/node-8.11/da/d6a/classv8_1_1_exception.html) class in a way that is compatible across the supported versions of V8. 
+ +Note that an TypeError object is simply a specialized form of `v8::Value`. + +Signature: + +```c++ +v8::Local Nan::TypeError(const char *msg); +v8::Local Nan::TypeError(v8::Local msg); +``` + + + +### Nan::ThrowError() + +Throw an Error object (a specialized `v8::Value` as above) in the current context. If a `msg` is provided, a new Error object will be created. + +Signature: + +```c++ +void Nan::ThrowError(const char *msg); +void Nan::ThrowError(v8::Local msg); +void Nan::ThrowError(v8::Local error); +``` + + + +### Nan::ThrowRangeError() + +Throw an RangeError object (a specialized `v8::Value` as above) in the current context. If a `msg` is provided, a new RangeError object will be created. + +Signature: + +```c++ +void Nan::ThrowRangeError(const char *msg); +void Nan::ThrowRangeError(v8::Local msg); +void Nan::ThrowRangeError(v8::Local error); +``` + + + +### Nan::ThrowReferenceError() + +Throw an ReferenceError object (a specialized `v8::Value` as above) in the current context. If a `msg` is provided, a new ReferenceError object will be created. + +Signature: + +```c++ +void Nan::ThrowReferenceError(const char *msg); +void Nan::ThrowReferenceError(v8::Local msg); +void Nan::ThrowReferenceError(v8::Local error); +``` + + + +### Nan::ThrowSyntaxError() + +Throw an SyntaxError object (a specialized `v8::Value` as above) in the current context. If a `msg` is provided, a new SyntaxError object will be created. + +Signature: + +```c++ +void Nan::ThrowSyntaxError(const char *msg); +void Nan::ThrowSyntaxError(v8::Local msg); +void Nan::ThrowSyntaxError(v8::Local error); +``` + + + +### Nan::ThrowTypeError() + +Throw an TypeError object (a specialized `v8::Value` as above) in the current context. If a `msg` is provided, a new TypeError object will be created. + +Signature: + +```c++ +void Nan::ThrowTypeError(const char *msg); +void Nan::ThrowTypeError(v8::Local msg); +void Nan::ThrowTypeError(v8::Local error); +``` + + +### Nan::FatalException() + +Replaces `node::FatalException()` which has a different API across supported versions of Node. For use with [`Nan::TryCatch`](#api_nan_try_catch). + +Signature: + +```c++ +void Nan::FatalException(const Nan::TryCatch& try_catch); +``` + + +### Nan::ErrnoException() + +Replaces `node::ErrnoException()` which has a different API across supported versions of Node. + +Signature: + +```c++ +v8::Local Nan::ErrnoException(int errorno, + const char* syscall = NULL, + const char* message = NULL, + const char* path = NULL); +``` + + + +### Nan::TryCatch + +A simple wrapper around [`v8::TryCatch`](https://v8docs.nodesource.com/node-8.11/d4/dc6/classv8_1_1_try_catch.html) compatible with all supported versions of V8. Can be used as a direct replacement in most cases. See also [`Nan::FatalException()`](#api_nan_fatal_exception) for an internal use compatible with `node::FatalException`. 
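+
+For example, a sketch of guarding a JavaScript call (the `CallGuarded` method name is invented; argument checking is omitted for brevity, and the class definition follows below):
+
+```c++
+// Call the function passed as the first argument and propagate any exception.
+NAN_METHOD(CallGuarded) {
+  v8::Local<v8::Function> fn = info[0].As<v8::Function>();
+  Nan::TryCatch try_catch;
+  Nan::MaybeLocal<v8::Value> result =
+      Nan::Call(fn, Nan::GetCurrentContext()->Global(), 0, NULL);
+  if (result.IsEmpty() && try_catch.HasCaught()) {
+    try_catch.ReThrow();  // hand the pending exception back to the JS caller
+    return;
+  }
+  info.GetReturnValue().Set(result.ToLocalChecked());
+}
+```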
+ +Signature: + +```c++ +class Nan::TryCatch { + public: + Nan::TryCatch(); + + bool HasCaught() const; + + bool CanContinue() const; + + v8::Local ReThrow(); + + v8::Local Exception() const; + + // Nan::MaybeLocal for older versions of V8 + v8::MaybeLocal StackTrace() const; + + v8::Local Message() const; + + void Reset(); + + void SetVerbose(bool value); + + void SetCaptureMessage(bool value); +}; +``` + diff --git a/node_modules/nan/doc/json.md b/node_modules/nan/doc/json.md new file mode 100644 index 00000000..4fa78dba --- /dev/null +++ b/node_modules/nan/doc/json.md @@ -0,0 +1,62 @@ +## JSON + +The _JSON_ object provides the c++ versions of the methods offered by the `JSON` object in javascript. V8 exposes these methods via the `v8::JSON` object. + + - Nan::JSON.Parse + - Nan::JSON.Stringify + +Refer to the V8 JSON object in the [V8 documentation](https://v8docs.nodesource.com/node-8.11/da/d6f/classv8_1_1_j_s_o_n.html) for more information about these methods and their arguments. + + + +### Nan::JSON.Parse + +A simple wrapper around [`v8::JSON::Parse`](https://v8docs.nodesource.com/node-8.11/da/d6f/classv8_1_1_j_s_o_n.html#a936310d2540fb630ed37d3ee3ffe4504). + +Definition: + +```c++ +Nan::MaybeLocal Nan::JSON::Parse(v8::Local json_string); +``` + +Use `JSON.Parse(json_string)` to parse a string into a `v8::Value`. + +Example: + +```c++ +v8::Local json_string = Nan::New("{ \"JSON\": \"object\" }").ToLocalChecked(); + +Nan::JSON NanJSON; +Nan::MaybeLocal result = NanJSON.Parse(json_string); +if (!result.IsEmpty()) { + v8::Local val = result.ToLocalChecked(); +} +``` + + + +### Nan::JSON.Stringify + +A simple wrapper around [`v8::JSON::Stringify`](https://v8docs.nodesource.com/node-8.11/da/d6f/classv8_1_1_j_s_o_n.html#a44b255c3531489ce43f6110209138860). + +Definition: + +```c++ +Nan::MaybeLocal Nan::JSON::Stringify(v8::Local json_object, v8::Local gap = v8::Local()); +``` + +Use `JSON.Stringify(value)` to stringify a `v8::Object`. + +Example: + +```c++ +// using `v8::Local val` from the `JSON::Parse` example +v8::Local obj = Nan::To(val).ToLocalChecked(); + +Nan::JSON NanJSON; +Nan::MaybeLocal result = NanJSON.Stringify(obj); +if (!result.IsEmpty()) { + v8::Local stringified = result.ToLocalChecked(); +} +``` + diff --git a/node_modules/nan/doc/maybe_types.md b/node_modules/nan/doc/maybe_types.md new file mode 100644 index 00000000..1a9fabf7 --- /dev/null +++ b/node_modules/nan/doc/maybe_types.md @@ -0,0 +1,583 @@ +## Maybe Types + +The `Nan::MaybeLocal` and `Nan::Maybe` types are monads that encapsulate `v8::Local` handles that _may be empty_. 
+ +* **Maybe Types** + - Nan::MaybeLocal + - Nan::Maybe + - Nan::Nothing + - Nan::Just +* **Maybe Helpers** + - Nan::Call() + - Nan::ToDetailString() + - Nan::ToArrayIndex() + - Nan::Equals() + - Nan::NewInstance() + - Nan::GetFunction() + - Nan::Set() + - Nan::DefineOwnProperty() + - Nan::ForceSet() + - Nan::Get() + - Nan::GetPropertyAttributes() + - Nan::Has() + - Nan::Delete() + - Nan::GetPropertyNames() + - Nan::GetOwnPropertyNames() + - Nan::SetPrototype() + - Nan::ObjectProtoToString() + - Nan::HasOwnProperty() + - Nan::HasRealNamedProperty() + - Nan::HasRealIndexedProperty() + - Nan::HasRealNamedCallbackProperty() + - Nan::GetRealNamedPropertyInPrototypeChain() + - Nan::GetRealNamedProperty() + - Nan::CallAsFunction() + - Nan::CallAsConstructor() + - Nan::GetSourceLine() + - Nan::GetLineNumber() + - Nan::GetStartColumn() + - Nan::GetEndColumn() + - Nan::CloneElementAt() + - Nan::HasPrivate() + - Nan::GetPrivate() + - Nan::SetPrivate() + - Nan::DeletePrivate() + - Nan::MakeMaybe() + + +### Nan::MaybeLocal + +A `Nan::MaybeLocal` is a wrapper around [`v8::Local`](https://v8docs.nodesource.com/node-8.11/de/deb/classv8_1_1_local.html) that enforces a check that determines whether the `v8::Local` is empty before it can be used. + +If an API method returns a `Nan::MaybeLocal`, the API method can potentially fail either because an exception is thrown, or because an exception is pending, e.g. because a previous API call threw an exception that hasn't been caught yet, or because a `v8::TerminateExecution` exception was thrown. In that case, an empty `Nan::MaybeLocal` is returned. + +Definition: + +```c++ +template class Nan::MaybeLocal { + public: + MaybeLocal(); + + template MaybeLocal(v8::Local that); + + bool IsEmpty() const; + + template bool ToLocal(v8::Local *out); + + // Will crash if the MaybeLocal<> is empty. + v8::Local ToLocalChecked(); + + template v8::Local FromMaybe(v8::Local default_value) const; +}; +``` + +See the documentation for [`v8::MaybeLocal`](https://v8docs.nodesource.com/node-8.11/d8/d7d/classv8_1_1_maybe_local.html) for further details. + + +### Nan::Maybe + +A simple `Nan::Maybe` type, representing an object which may or may not have a value, see https://hackage.haskell.org/package/base/docs/Data-Maybe.html. + +If an API method returns a `Nan::Maybe<>`, the API method can potentially fail either because an exception is thrown, or because an exception is pending, e.g. because a previous API call threw an exception that hasn't been caught yet, or because a `v8::TerminateExecution` exception was thrown. In that case, a "Nothing" value is returned. + +Definition: + +```c++ +template class Nan::Maybe { + public: + bool IsNothing() const; + bool IsJust() const; + + // Will crash if the Maybe<> is nothing. + T FromJust(); + + T FromMaybe(const T& default_value); + + bool operator==(const Maybe &other); + + bool operator!=(const Maybe &other); +}; +``` + +See the documentation for [`v8::Maybe`](https://v8docs.nodesource.com/node-8.11/d9/d4b/classv8_1_1_maybe.html) for further details. + + +### Nan::Nothing + +Construct an empty `Nan::Maybe` type representing _nothing_. + +```c++ +template Nan::Maybe Nan::Nothing(); +``` + + +### Nan::Just + +Construct a `Nan::Maybe` type representing _just_ a value. 
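+
+A short sketch of the two constructors in use (the `HalfIfEven` helper is invented for the example; the signature of `Nan::Just()` follows below):
+
+```c++
+// Returns "just" half of n when n is even, and "nothing" otherwise.
+Nan::Maybe<int> HalfIfEven(int n) {
+  if (n % 2 != 0) return Nan::Nothing<int>();  // no value to report
+  return Nan::Just(n / 2);                     // wrap the value
+}
+```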
+ +```c++ +template Nan::Maybe Nan::Just(const T &t); +``` + + +### Nan::Call() + +A helper method for calling a synchronous [`v8::Function#Call()`](https://v8docs.nodesource.com/node-8.11/d5/d54/classv8_1_1_function.html#a9c3d0e4e13ddd7721fce238aa5b94a11) in a way compatible across supported versions of V8. + +For asynchronous callbacks, use Nan::Callback::Call along with an AsyncResource. + +Signature: + +```c++ +Nan::MaybeLocal Nan::Call(v8::Local fun, v8::Local recv, int argc, v8::Local argv[]); +Nan::MaybeLocal Nan::Call(const Nan::Callback& callback, v8::Local recv, + int argc, v8::Local argv[]); +Nan::MaybeLocal Nan::Call(const Nan::Callback& callback, int argc, v8::Local argv[]); +``` + + + +### Nan::ToDetailString() + +A helper method for calling [`v8::Value#ToDetailString()`](https://v8docs.nodesource.com/node-8.11/dc/d0a/classv8_1_1_value.html#a2f9770296dc2c8d274bc8cc0dca243e5) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::ToDetailString(v8::Local val); +``` + + + +### Nan::ToArrayIndex() + +A helper method for calling [`v8::Value#ToArrayIndex()`](https://v8docs.nodesource.com/node-8.11/dc/d0a/classv8_1_1_value.html#acc5bbef3c805ec458470c0fcd6f13493) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::ToArrayIndex(v8::Local val); +``` + + + +### Nan::Equals() + +A helper method for calling [`v8::Value#Equals()`](https://v8docs.nodesource.com/node-8.11/dc/d0a/classv8_1_1_value.html#a08fba1d776a59bbf6864b25f9152c64b) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::Equals(v8::Local a, v8::Local(b)); +``` + + + +### Nan::NewInstance() + +A helper method for calling [`v8::Function#NewInstance()`](https://v8docs.nodesource.com/node-8.11/d5/d54/classv8_1_1_function.html#ae477558b10c14b76ed00e8dbab44ce5b) and [`v8::ObjectTemplate#NewInstance()`](https://v8docs.nodesource.com/node-8.11/db/d5f/classv8_1_1_object_template.html#ad605a7543cfbc5dab54cdb0883d14ae4) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::NewInstance(v8::Local h); +Nan::MaybeLocal Nan::NewInstance(v8::Local h, int argc, v8::Local argv[]); +Nan::MaybeLocal Nan::NewInstance(v8::Local h); +``` + + + +### Nan::GetFunction() + +A helper method for calling [`v8::FunctionTemplate#GetFunction()`](https://v8docs.nodesource.com/node-8.11/d8/d83/classv8_1_1_function_template.html#a56d904662a86eca78da37d9bb0ed3705) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::GetFunction(v8::Local t); +``` + + + +### Nan::Set() + +A helper method for calling [`v8::Object#Set()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a67604ea3734f170c66026064ea808f20) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::Set(v8::Local obj, + v8::Local key, + v8::Local value) +Nan::Maybe Nan::Set(v8::Local obj, + uint32_t index, + v8::Local value); +``` + + + +### Nan::DefineOwnProperty() + +A helper method for calling [`v8::Object#DefineOwnProperty()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a6f76b2ed605cb8f9185b92de0033a820) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::DefineOwnProperty(v8::Local obj, + v8::Local key, + v8::Local value, + v8::PropertyAttribute attribs = v8::None); +``` + + + +### Nan::ForceSet() + +Deprecated, use Nan::DefineOwnProperty(). 
+ +A helper method for calling [`v8::Object#ForceSet()`](https://v8docs.nodesource.com/node-0.12/db/d85/classv8_1_1_object.html#acfbdfd7427b516ebdb5c47c4df5ed96c) in a way compatible across supported versions of V8. + +Signature: + +```c++ +NAN_DEPRECATED Nan::Maybe Nan::ForceSet(v8::Local obj, + v8::Local key, + v8::Local value, + v8::PropertyAttribute attribs = v8::None); +``` + + + +### Nan::Get() + +A helper method for calling [`v8::Object#Get()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a2565f03e736694f6b1e1cf22a0b4eac2) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::Get(v8::Local obj, + v8::Local key); +Nan::MaybeLocal Nan::Get(v8::Local obj, uint32_t index); +``` + + + +### Nan::GetPropertyAttributes() + +A helper method for calling [`v8::Object#GetPropertyAttributes()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a9b898894da3d1db2714fd9325a54fe57) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::GetPropertyAttributes( + v8::Local obj, + v8::Local key); +``` + + + +### Nan::Has() + +A helper method for calling [`v8::Object#Has()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#ab3c3d89ea7c2f9afd08965bd7299a41d) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::Has(v8::Local obj, v8::Local key); +Nan::Maybe Nan::Has(v8::Local obj, uint32_t index); +``` + + + +### Nan::Delete() + +A helper method for calling [`v8::Object#Delete()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a48e4a19b2cedff867eecc73ddb7d377f) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::Delete(v8::Local obj, + v8::Local key); +Nan::Maybe Nan::Delete(v8::Local obj, uint32_t index); +``` + + + +### Nan::GetPropertyNames() + +A helper method for calling [`v8::Object#GetPropertyNames()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#aced885270cfd2c956367b5eedc7fbfe8) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::GetPropertyNames(v8::Local obj); +``` + + + +### Nan::GetOwnPropertyNames() + +A helper method for calling [`v8::Object#GetOwnPropertyNames()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a79a6e4d66049b9aa648ed4dfdb23e6eb) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::GetOwnPropertyNames(v8::Local obj); +``` + + + +### Nan::SetPrototype() + +A helper method for calling [`v8::Object#SetPrototype()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a442706b22fceda6e6d1f632122a9a9f4) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::SetPrototype(v8::Local obj, + v8::Local prototype); +``` + + + +### Nan::ObjectProtoToString() + +A helper method for calling [`v8::Object#ObjectProtoToString()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#ab7a92b4dcf822bef72f6c0ac6fea1f0b) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::ObjectProtoToString(v8::Local obj); +``` + + + +### Nan::HasOwnProperty() + +A helper method for calling [`v8::Object#HasOwnProperty()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#ab7b7245442ca6de1e1c145ea3fd653ff) in a way compatible across supported versions of V8. 
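+
+A sketch of the usual checking pattern with this helper, together with `Nan::Get()` from above (the `GetNameOrUndefined` function is invented for the example; the formal signature follows below):
+
+```c++
+// Return obj.name when it is an own property, otherwise undefined.
+v8::Local<v8::Value> GetNameOrUndefined(v8::Local<v8::Object> obj) {
+  v8::Local<v8::String> key = Nan::New("name").ToLocalChecked();
+  Nan::Maybe<bool> has = Nan::HasOwnProperty(obj, key);
+  if (has.IsJust() && has.FromJust()) {
+    return Nan::Get(obj, key).ToLocalChecked();
+  }
+  return Nan::Undefined();
+}
+```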
+ +Signature: + +```c++ +Nan::Maybe Nan::HasOwnProperty(v8::Local obj, + v8::Local key); +``` + + + +### Nan::HasRealNamedProperty() + +A helper method for calling [`v8::Object#HasRealNamedProperty()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#ad8b80a59c9eb3c1e6c3cd6c84571f767) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::HasRealNamedProperty(v8::Local obj, + v8::Local key); +``` + + + +### Nan::HasRealIndexedProperty() + +A helper method for calling [`v8::Object#HasRealIndexedProperty()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#af94fc1135a5e74a2193fb72c3a1b9855) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::HasRealIndexedProperty(v8::Local obj, + uint32_t index); +``` + + + +### Nan::HasRealNamedCallbackProperty() + +A helper method for calling [`v8::Object#HasRealNamedCallbackProperty()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#af743b7ea132b89f84d34d164d0668811) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::HasRealNamedCallbackProperty( + v8::Local obj, + v8::Local key); +``` + + + +### Nan::GetRealNamedPropertyInPrototypeChain() + +A helper method for calling [`v8::Object#GetRealNamedPropertyInPrototypeChain()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a8700b1862e6b4783716964ba4d5e6172) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::GetRealNamedPropertyInPrototypeChain( + v8::Local obj, + v8::Local key); +``` + + + +### Nan::GetRealNamedProperty() + +A helper method for calling [`v8::Object#GetRealNamedProperty()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a84471a824576a5994fdd0ffcbf99ccc0) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::GetRealNamedProperty(v8::Local obj, + v8::Local key); +``` + + + +### Nan::CallAsFunction() + +A helper method for calling [`v8::Object#CallAsFunction()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#ad3ffc36f3dfc3592ce2a96bc047ee2cd) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::CallAsFunction(v8::Local obj, + v8::Local recv, + int argc, + v8::Local argv[]); +``` + + + +### Nan::CallAsConstructor() + +A helper method for calling [`v8::Object#CallAsConstructor()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a50d571de50d0b0dfb28795619d07a01b) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::CallAsConstructor(v8::Local obj, + int argc, + v8::Local argv[]); +``` + + + +### Nan::GetSourceLine() + +A helper method for calling [`v8::Message#GetSourceLine()`](https://v8docs.nodesource.com/node-8.11/d9/d28/classv8_1_1_message.html#a849f7a6c41549d83d8159825efccd23a) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::GetSourceLine(v8::Local msg); +``` + + + +### Nan::GetLineNumber() + +A helper method for calling [`v8::Message#GetLineNumber()`](https://v8docs.nodesource.com/node-8.11/d9/d28/classv8_1_1_message.html#adbe46c10a88a6565f2732a2d2adf99b9) in a way compatible across supported versions of V8. 
+ +Signature: + +```c++ +Nan::Maybe Nan::GetLineNumber(v8::Local msg); +``` + + + +### Nan::GetStartColumn() + +A helper method for calling [`v8::Message#GetStartColumn()`](https://v8docs.nodesource.com/node-8.11/d9/d28/classv8_1_1_message.html#a60ede616ba3822d712e44c7a74487ba6) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::GetStartColumn(v8::Local msg); +``` + + + +### Nan::GetEndColumn() + +A helper method for calling [`v8::Message#GetEndColumn()`](https://v8docs.nodesource.com/node-8.11/d9/d28/classv8_1_1_message.html#aaa004cf19e529da980bc19fcb76d93be) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::GetEndColumn(v8::Local msg); +``` + + + +### Nan::CloneElementAt() + +A helper method for calling [`v8::Array#CloneElementAt()`](https://v8docs.nodesource.com/node-4.8/d3/d32/classv8_1_1_array.html#a1d3a878d4c1c7cae974dd50a1639245e) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::CloneElementAt(v8::Local array, uint32_t index); +``` + + +### Nan::HasPrivate() + +A helper method for calling [`v8::Object#HasPrivate()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#af68a0b98066cfdeb8f943e98a40ba08d) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::HasPrivate(v8::Local object, v8::Local key); +``` + + +### Nan::GetPrivate() + +A helper method for calling [`v8::Object#GetPrivate()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a169f2da506acbec34deadd9149a1925a) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::MaybeLocal Nan::GetPrivate(v8::Local object, v8::Local key); +``` + + +### Nan::SetPrivate() + +A helper method for calling [`v8::Object#SetPrivate()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#ace1769b0f3b86bfe9fda1010916360ee) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::SetPrivate(v8::Local object, v8::Local key, v8::Local value); +``` + + +### Nan::DeletePrivate() + +A helper method for calling [`v8::Object#DeletePrivate()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a138bb32a304f3982be02ad499693b8fd) in a way compatible across supported versions of V8. + +Signature: + +```c++ +Nan::Maybe Nan::DeletePrivate(v8::Local object, v8::Local key); +``` + + +### Nan::MakeMaybe() + +Wraps a `v8::Local<>` in a `Nan::MaybeLocal<>`. When called with a `Nan::MaybeLocal<>` it just returns its argument. This is useful in generic template code that builds on NAN. + +Synopsis: + +```c++ + MaybeLocal someNumber = MakeMaybe(New(3.141592654)); + MaybeLocal someString = MakeMaybe(New("probably")); +``` + +Signature: + +```c++ +template class MaybeMaybe> +Nan::MaybeLocal Nan::MakeMaybe(MaybeMaybe v); +``` diff --git a/node_modules/nan/doc/methods.md b/node_modules/nan/doc/methods.md new file mode 100644 index 00000000..b2b26c38 --- /dev/null +++ b/node_modules/nan/doc/methods.md @@ -0,0 +1,661 @@ +## JavaScript-accessible methods + +A _template_ is a blueprint for JavaScript functions and objects in a context. You can use a template to wrap C++ functions and data structures within JavaScript objects so that they can be manipulated from JavaScript. See the V8 Embedders Guide section on [Templates](https://github.com/v8/v8/wiki/Embedder%27s-Guide#templates) for further information. 
+ +In order to expose functionality to JavaScript via a template, you must provide it to V8 in a form that it understands. Across the versions of V8 supported by NAN, JavaScript-accessible method signatures vary widely, NAN fully abstracts method declaration and provides you with an interface that is similar to the most recent V8 API but is backward-compatible with older versions that still use the now-deceased `v8::Argument` type. + +* **Method argument types** + - Nan::FunctionCallbackInfo + - Nan::PropertyCallbackInfo + - Nan::ReturnValue +* **Method declarations** + - Method declaration + - Getter declaration + - Setter declaration + - Property getter declaration + - Property setter declaration + - Property enumerator declaration + - Property deleter declaration + - Property query declaration + - Index getter declaration + - Index setter declaration + - Index enumerator declaration + - Index deleter declaration + - Index query declaration +* Method and template helpers + - Nan::SetMethod() + - Nan::SetPrototypeMethod() + - Nan::SetAccessor() + - Nan::SetNamedPropertyHandler() + - Nan::SetIndexedPropertyHandler() + - Nan::SetTemplate() + - Nan::SetPrototypeTemplate() + - Nan::SetInstanceTemplate() + - Nan::SetCallHandler() + - Nan::SetCallAsFunctionHandler() + + +### Nan::FunctionCallbackInfo + +`Nan::FunctionCallbackInfo` should be used in place of [`v8::FunctionCallbackInfo`](https://v8docs.nodesource.com/node-8.11/dd/d0d/classv8_1_1_function_callback_info.html), even with older versions of Node where `v8::FunctionCallbackInfo` does not exist. + +Definition: + +```c++ +template class FunctionCallbackInfo { + public: + ReturnValue GetReturnValue() const; + v8::Local Callee(); // NOTE: Not available in NodeJS >= 10.0.0 + v8::Local Data(); + v8::Local Holder(); + bool IsConstructCall(); + int Length() const; + v8::Local operator[](int i) const; + v8::Local This() const; + v8::Isolate *GetIsolate() const; +}; +``` + +See the [`v8::FunctionCallbackInfo`](https://v8docs.nodesource.com/node-8.11/dd/d0d/classv8_1_1_function_callback_info.html) documentation for usage details on these. See [`Nan::ReturnValue`](#api_nan_return_value) for further information on how to set a return value from methods. + +**Note:** `FunctionCallbackInfo::Callee` is removed in Node.js after `10.0.0` because it is was deprecated in V8. Consider using `info.Data()` to pass any information you need. + + +### Nan::PropertyCallbackInfo + +`Nan::PropertyCallbackInfo` should be used in place of [`v8::PropertyCallbackInfo`](https://v8docs.nodesource.com/node-8.11/d7/dc5/classv8_1_1_property_callback_info.html), even with older versions of Node where `v8::PropertyCallbackInfo` does not exist. + +Definition: + +```c++ +template class PropertyCallbackInfo : public PropertyCallbackInfoBase { + public: + ReturnValue GetReturnValue() const; + v8::Isolate* GetIsolate() const; + v8::Local Data() const; + v8::Local This() const; + v8::Local Holder() const; +}; +``` + +See the [`v8::PropertyCallbackInfo`](https://v8docs.nodesource.com/node-8.11/d7/dc5/classv8_1_1_property_callback_info.html) documentation for usage details on these. See [`Nan::ReturnValue`](#api_nan_return_value) for further information on how to set a return value from property accessor methods. 
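+
+For a concrete sense of how these `info` objects are used in practice, here is a minimal sketch of a method that inspects its arguments through `Nan::FunctionCallbackInfo` before setting a return value. The method name `Add` and its argument checks are illustrative only, not part of the NAN API:
+
+```c++
+// Illustrative sketch only: read two numeric arguments and return their sum.
+void Add(const Nan::FunctionCallbackInfo<v8::Value>& info) {
+  if (info.Length() < 2 || !info[0]->IsNumber() || !info[1]->IsNumber()) {
+    Nan::ThrowTypeError("expected two numeric arguments");
+    return;
+  }
+  double sum = Nan::To<double>(info[0]).FromJust() +
+               Nan::To<double>(info[1]).FromJust();
+  info.GetReturnValue().Set(Nan::New(sum));
+}
+```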
+ + +### Nan::ReturnValue + +`Nan::ReturnValue` is used in place of [`v8::ReturnValue`](https://v8docs.nodesource.com/node-8.11/da/da7/classv8_1_1_return_value.html) on both [`Nan::FunctionCallbackInfo`](#api_nan_function_callback_info) and [`Nan::PropertyCallbackInfo`](#api_nan_property_callback_info) as the return type of `GetReturnValue()`. + +Example usage: + +```c++ +void EmptyArray(const Nan::FunctionCallbackInfo& info) { + info.GetReturnValue().Set(Nan::New()); +} +``` + +Definition: + +```c++ +template class ReturnValue { + public: + // Handle setters + template void Set(const v8::Local &handle); + template void Set(const Nan::Global &handle); + + // Fast primitive setters + void Set(bool value); + void Set(double i); + void Set(int32_t i); + void Set(uint32_t i); + + // Fast JS primitive setters + void SetNull(); + void SetUndefined(); + void SetEmptyString(); + + // Convenience getter for isolate + v8::Isolate *GetIsolate() const; +}; +``` + +See the documentation on [`v8::ReturnValue`](https://v8docs.nodesource.com/node-8.11/da/da7/classv8_1_1_return_value.html) for further information on this. + + +### Method declaration + +JavaScript-accessible methods should be declared with the following signature to form a `Nan::FunctionCallback`: + +```c++ +typedef void(*FunctionCallback)(const FunctionCallbackInfo&); +``` + +Example: + +```c++ +void MethodName(const Nan::FunctionCallbackInfo& info) { + ... +} +``` + +You do not need to declare a new `HandleScope` within a method as one is implicitly created for you. + +**Example usage** + +```c++ +// .h: +class Foo : public Nan::ObjectWrap { + ... + + static void Bar(const Nan::FunctionCallbackInfo& info); + static void Baz(const Nan::FunctionCallbackInfo& info); +} + + +// .cc: +void Foo::Bar(const Nan::FunctionCallbackInfo& info) { + ... +} + +void Foo::Baz(const Nan::FunctionCallbackInfo& info) { + ... +} +``` + +A helper macro `NAN_METHOD(methodname)` exists, compatible with NAN v1 method declarations. + +**Example usage with `NAN_METHOD(methodname)`** + +```c++ +// .h: +class Foo : public Nan::ObjectWrap { + ... + + static NAN_METHOD(Bar); + static NAN_METHOD(Baz); +} + + +// .cc: +NAN_METHOD(Foo::Bar) { + ... +} + +NAN_METHOD(Foo::Baz) { + ... +} +``` + +Use [`Nan::SetPrototypeMethod`](#api_nan_set_prototype_method) to attach a method to a JavaScript function prototype or [`Nan::SetMethod`](#api_nan_set_method) to attach a method directly on a JavaScript object. + + +### Getter declaration + +JavaScript-accessible getters should be declared with the following signature to form a `Nan::GetterCallback`: + +```c++ +typedef void(*GetterCallback)(v8::Local, + const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void GetterName(v8::Local property, + const Nan::PropertyCallbackInfo& info) { + ... +} +``` + +You do not need to declare a new `HandleScope` within a getter as one is implicitly created for you. + +A helper macro `NAN_GETTER(methodname)` exists, compatible with NAN v1 method declarations. + +Also see the V8 Embedders Guide documentation on [Accessors](https://developers.google.com/v8/embed#accesssors). + + +### Setter declaration + +JavaScript-accessible setters should be declared with the following signature to form a Nan::SetterCallback: + +```c++ +typedef void(*SetterCallback)(v8::Local, + v8::Local, + const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void SetterName(v8::Local property, + v8::Local value, + const Nan::PropertyCallbackInfo& info) { + ... 
+} +``` + +You do not need to declare a new `HandleScope` within a setter as one is implicitly created for you. + +A helper macro `NAN_SETTER(methodname)` exists, compatible with NAN v1 method declarations. + +Also see the V8 Embedders Guide documentation on [Accessors](https://developers.google.com/v8/embed#accesssors). + + +### Property getter declaration + +JavaScript-accessible property getters should be declared with the following signature to form a Nan::PropertyGetterCallback: + +```c++ +typedef void(*PropertyGetterCallback)(v8::Local, + const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void PropertyGetterName(v8::Local property, + const Nan::PropertyCallbackInfo& info) { + ... +} +``` + +You do not need to declare a new `HandleScope` within a property getter as one is implicitly created for you. + +A helper macro `NAN_PROPERTY_GETTER(methodname)` exists, compatible with NAN v1 method declarations. + +Also see the V8 Embedders Guide documentation on named property [Interceptors](https://developers.google.com/v8/embed#interceptors). + + +### Property setter declaration + +JavaScript-accessible property setters should be declared with the following signature to form a Nan::PropertySetterCallback: + +```c++ +typedef void(*PropertySetterCallback)(v8::Local, + v8::Local, + const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void PropertySetterName(v8::Local property, + v8::Local value, + const Nan::PropertyCallbackInfo& info); +``` + +You do not need to declare a new `HandleScope` within a property setter as one is implicitly created for you. + +A helper macro `NAN_PROPERTY_SETTER(methodname)` exists, compatible with NAN v1 method declarations. + +Also see the V8 Embedders Guide documentation on named property [Interceptors](https://developers.google.com/v8/embed#interceptors). + + +### Property enumerator declaration + +JavaScript-accessible property enumerators should be declared with the following signature to form a Nan::PropertyEnumeratorCallback: + +```c++ +typedef void(*PropertyEnumeratorCallback)(const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void PropertyEnumeratorName(const Nan::PropertyCallbackInfo& info); +``` + +You do not need to declare a new `HandleScope` within a property enumerator as one is implicitly created for you. + +A helper macro `NAN_PROPERTY_ENUMERATOR(methodname)` exists, compatible with NAN v1 method declarations. + +Also see the V8 Embedders Guide documentation on named property [Interceptors](https://developers.google.com/v8/embed#interceptors). + + +### Property deleter declaration + +JavaScript-accessible property deleters should be declared with the following signature to form a Nan::PropertyDeleterCallback: + +```c++ +typedef void(*PropertyDeleterCallback)(v8::Local, + const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void PropertyDeleterName(v8::Local property, + const Nan::PropertyCallbackInfo& info); +``` + +You do not need to declare a new `HandleScope` within a property deleter as one is implicitly created for you. + +A helper macro `NAN_PROPERTY_DELETER(methodname)` exists, compatible with NAN v1 method declarations. + +Also see the V8 Embedders Guide documentation on named property [Interceptors](https://developers.google.com/v8/embed#interceptors). 
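+
+As a deliberately trivial sketch of what such named-property callbacks can look like (the names and behaviour below are illustrative only), here is a getter and deleter pair; they would typically be registered together via `Nan::SetNamedPropertyHandler()`, described further down:
+
+```c++
+// Illustrative only: a getter that echoes the property name back as the
+// property value, and a deleter that reports every deletion as successful.
+void EchoPropertyGetter(v8::Local<v8::String> property,
+                        const Nan::PropertyCallbackInfo<v8::Value>& info) {
+  info.GetReturnValue().Set(property);
+}
+
+void AllowAllDeleter(v8::Local<v8::String> property,
+                     const Nan::PropertyCallbackInfo<v8::Boolean>& info) {
+  info.GetReturnValue().Set(Nan::New(true));
+}
+```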
+ + +### Property query declaration + +JavaScript-accessible property query methods should be declared with the following signature to form a Nan::PropertyQueryCallback: + +```c++ +typedef void(*PropertyQueryCallback)(v8::Local, + const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void PropertyQueryName(v8::Local property, + const Nan::PropertyCallbackInfo& info); +``` + +You do not need to declare a new `HandleScope` within a property query method as one is implicitly created for you. + +A helper macro `NAN_PROPERTY_QUERY(methodname)` exists, compatible with NAN v1 method declarations. + +Also see the V8 Embedders Guide documentation on named property [Interceptors](https://developers.google.com/v8/embed#interceptors). + + +### Index getter declaration + +JavaScript-accessible index getter methods should be declared with the following signature to form a Nan::IndexGetterCallback: + +```c++ +typedef void(*IndexGetterCallback)(uint32_t, + const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void IndexGetterName(uint32_t index, const PropertyCallbackInfo& info); +``` + +You do not need to declare a new `HandleScope` within a index getter as one is implicitly created for you. + +A helper macro `NAN_INDEX_GETTER(methodname)` exists, compatible with NAN v1 method declarations. + +Also see the V8 Embedders Guide documentation on indexed property [Interceptors](https://developers.google.com/v8/embed#interceptors). + + +### Index setter declaration + +JavaScript-accessible index setter methods should be declared with the following signature to form a Nan::IndexSetterCallback: + +```c++ +typedef void(*IndexSetterCallback)(uint32_t, + v8::Local, + const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void IndexSetterName(uint32_t index, + v8::Local value, + const PropertyCallbackInfo& info); +``` + +You do not need to declare a new `HandleScope` within a index setter as one is implicitly created for you. + +A helper macro `NAN_INDEX_SETTER(methodname)` exists, compatible with NAN v1 method declarations. + +Also see the V8 Embedders Guide documentation on indexed property [Interceptors](https://developers.google.com/v8/embed#interceptors). + + +### Index enumerator declaration + +JavaScript-accessible index enumerator methods should be declared with the following signature to form a Nan::IndexEnumeratorCallback: + +```c++ +typedef void(*IndexEnumeratorCallback)(const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void IndexEnumeratorName(const PropertyCallbackInfo& info); +``` + +You do not need to declare a new `HandleScope` within a index enumerator as one is implicitly created for you. + +A helper macro `NAN_INDEX_ENUMERATOR(methodname)` exists, compatible with NAN v1 method declarations. + +Also see the V8 Embedders Guide documentation on indexed property [Interceptors](https://developers.google.com/v8/embed#interceptors). + + +### Index deleter declaration + +JavaScript-accessible index deleter methods should be declared with the following signature to form a Nan::IndexDeleterCallback: + +```c++ +typedef void(*IndexDeleterCallback)(uint32_t, + const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void IndexDeleterName(uint32_t index, const PropertyCallbackInfo& info); +``` + +You do not need to declare a new `HandleScope` within a index deleter as one is implicitly created for you. + +A helper macro `NAN_INDEX_DELETER(methodname)` exists, compatible with NAN v1 method declarations. 
+ +Also see the V8 Embedders Guide documentation on indexed property [Interceptors](https://developers.google.com/v8/embed#interceptors). + + +### Index query declaration + +JavaScript-accessible index query methods should be declared with the following signature to form a Nan::IndexQueryCallback: + +```c++ +typedef void(*IndexQueryCallback)(uint32_t, + const PropertyCallbackInfo&); +``` + +Example: + +```c++ +void IndexQueryName(uint32_t index, const PropertyCallbackInfo& info); +``` + +You do not need to declare a new `HandleScope` within a index query method as one is implicitly created for you. + +A helper macro `NAN_INDEX_QUERY(methodname)` exists, compatible with NAN v1 method declarations. + +Also see the V8 Embedders Guide documentation on indexed property [Interceptors](https://developers.google.com/v8/embed#interceptors). + + +### Nan::SetMethod() + +Sets a method with a given name directly on a JavaScript object where the method has the `Nan::FunctionCallback` signature (see Method declaration). + +Signature: + +```c++ +void Nan::SetMethod(v8::Local recv, + const char *name, + Nan::FunctionCallback callback) +void Nan::SetMethod(v8::Local templ, + const char *name, + Nan::FunctionCallback callback) +``` + + +### Nan::SetPrototypeMethod() + +Sets a method with a given name on a `FunctionTemplate`'s prototype where the method has the `Nan::FunctionCallback` signature (see Method declaration). + +Signature: + +```c++ +void Nan::SetPrototypeMethod(v8::Local recv, + const char* name, + Nan::FunctionCallback callback) +``` + + +### Nan::SetAccessor() + +Sets getters and setters for a property with a given name on an `ObjectTemplate` or a plain `Object`. Accepts getters with the `Nan::GetterCallback` signature (see Getter declaration) and setters with the `Nan::SetterCallback` signature (see Setter declaration). + +Signature: + +```c++ +void SetAccessor(v8::Local tpl, + v8::Local name, + Nan::GetterCallback getter, + Nan::SetterCallback setter = 0, + v8::Local data = v8::Local(), + v8::AccessControl settings = v8::DEFAULT, + v8::PropertyAttribute attribute = v8::None, + imp::Sig signature = imp::Sig()); +bool SetAccessor(v8::Local obj, + v8::Local name, + Nan::GetterCallback getter, + Nan::SetterCallback setter = 0, + v8::Local data = v8::Local(), + v8::AccessControl settings = v8::DEFAULT, + v8::PropertyAttribute attribute = v8::None) +``` + +See the V8 [`ObjectTemplate#SetAccessor()`](https://v8docs.nodesource.com/node-8.11/db/d5f/classv8_1_1_object_template.html#aca0ed196f8a9adb1f68b1aadb6c9cd77) and [`Object#SetAccessor()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#ae91b3b56b357f285288c89fbddc46d1b) for further information about how to use `Nan::SetAccessor()`. + + +### Nan::SetNamedPropertyHandler() + +Sets named property getters, setters, query, deleter and enumerator methods on an `ObjectTemplate`. 
Accepts: + +* Property getters with the `Nan::PropertyGetterCallback` signature (see Property getter declaration) +* Property setters with the `Nan::PropertySetterCallback` signature (see Property setter declaration) +* Property query methods with the `Nan::PropertyQueryCallback` signature (see Property query declaration) +* Property deleters with the `Nan::PropertyDeleterCallback` signature (see Property deleter declaration) +* Property enumerators with the `Nan::PropertyEnumeratorCallback` signature (see Property enumerator declaration) + +Signature: + +```c++ +void SetNamedPropertyHandler(v8::Local tpl, + Nan::PropertyGetterCallback getter, + Nan::PropertySetterCallback setter = 0, + Nan::PropertyQueryCallback query = 0, + Nan::PropertyDeleterCallback deleter = 0, + Nan::PropertyEnumeratorCallback enumerator = 0, + v8::Local data = v8::Local()) +``` + +See the V8 [`ObjectTemplate#SetNamedPropertyHandler()`](https://v8docs.nodesource.com/node-8.11/db/d5f/classv8_1_1_object_template.html#a33b3ebd7de641f6cc6414b7de01fc1c7) for further information about how to use `Nan::SetNamedPropertyHandler()`. + + +### Nan::SetIndexedPropertyHandler() + +Sets indexed property getters, setters, query, deleter and enumerator methods on an `ObjectTemplate`. Accepts: + +* Indexed property getters with the `Nan::IndexGetterCallback` signature (see Index getter declaration) +* Indexed property setters with the `Nan::IndexSetterCallback` signature (see Index setter declaration) +* Indexed property query methods with the `Nan::IndexQueryCallback` signature (see Index query declaration) +* Indexed property deleters with the `Nan::IndexDeleterCallback` signature (see Index deleter declaration) +* Indexed property enumerators with the `Nan::IndexEnumeratorCallback` signature (see Index enumerator declaration) + +Signature: + +```c++ +void SetIndexedPropertyHandler(v8::Local tpl, + Nan::IndexGetterCallback getter, + Nan::IndexSetterCallback setter = 0, + Nan::IndexQueryCallback query = 0, + Nan::IndexDeleterCallback deleter = 0, + Nan::IndexEnumeratorCallback enumerator = 0, + v8::Local data = v8::Local()) +``` + +See the V8 [`ObjectTemplate#SetIndexedPropertyHandler()`](https://v8docs.nodesource.com/node-8.11/db/d5f/classv8_1_1_object_template.html#ac89f06d634add0e890452033f7d17ff1) for further information about how to use `Nan::SetIndexedPropertyHandler()`. + + +### Nan::SetTemplate() + +Adds properties on an `Object`'s or `Function`'s template. + +Signature: + +```c++ +void Nan::SetTemplate(v8::Local templ, + const char *name, + v8::Local value); +void Nan::SetTemplate(v8::Local templ, + v8::Local name, + v8::Local value, + v8::PropertyAttribute attributes) +``` + +Calls the `Template`'s [`Set()`](https://v8docs.nodesource.com/node-8.11/db/df7/classv8_1_1_template.html#ae3fbaff137557aa6a0233bc7e52214ac). + + +### Nan::SetPrototypeTemplate() + +Adds properties on an `Object`'s or `Function`'s prototype template. + +Signature: + +```c++ +void Nan::SetPrototypeTemplate(v8::Local templ, + const char *name, + v8::Local value); +void Nan::SetPrototypeTemplate(v8::Local templ, + v8::Local name, + v8::Local value, + v8::PropertyAttribute attributes) +``` + +Calls the `FunctionTemplate`'s _PrototypeTemplate's_ [`Set()`](https://v8docs.nodesource.com/node-8.11/db/df7/classv8_1_1_template.html#a2db6a56597bf23c59659c0659e564ddf). + + +### Nan::SetInstanceTemplate() + +Use to add instance properties on `FunctionTemplate`'s. 
+ +Signature: + +```c++ +void Nan::SetInstanceTemplate(v8::Local templ, + const char *name, + v8::Local value); +void Nan::SetInstanceTemplate(v8::Local templ, + v8::Local name, + v8::Local value, + v8::PropertyAttribute attributes) +``` + +Calls the `FunctionTemplate`'s _InstanceTemplate's_ [`Set()`](https://v8docs.nodesource.com/node-8.11/db/df7/classv8_1_1_template.html#a2db6a56597bf23c59659c0659e564ddf). + + +### Nan::SetCallHandler() + +Set the call-handler callback for a `v8::FunctionTemplate`. +This callback is called whenever the function created from this FunctionTemplate is called. + +Signature: + +```c++ +void Nan::SetCallHandler(v8::Local templ, Nan::FunctionCallback callback, v8::Local data = v8::Local()) +``` + +Calls the `FunctionTemplate`'s [`SetCallHandler()`](https://v8docs.nodesource.com/node-8.11/d8/d83/classv8_1_1_function_template.html#ab7574b298db3c27fbc2ed465c08ea2f8). + + +### Nan::SetCallAsFunctionHandler() + +Sets the callback to be used when calling instances created from the `v8::ObjectTemplate` as a function. +If no callback is set, instances behave like normal JavaScript objects that cannot be called as a function. + +Signature: + +```c++ +void Nan::SetCallAsFunctionHandler(v8::Local templ, Nan::FunctionCallback callback, v8::Local data = v8::Local()) +``` + +Calls the `ObjectTemplate`'s [`SetCallAsFunctionHandler()`](https://v8docs.nodesource.com/node-8.11/db/d5f/classv8_1_1_object_template.html#a5e9612fc80bf6db8f2da199b9b0bd04e). + diff --git a/node_modules/nan/doc/new.md b/node_modules/nan/doc/new.md new file mode 100644 index 00000000..359df435 --- /dev/null +++ b/node_modules/nan/doc/new.md @@ -0,0 +1,147 @@ +## New + +NAN provides a `Nan::New()` helper for the creation of new JavaScript objects in a way that's compatible across the supported versions of V8. + + - Nan::New() + - Nan::Undefined() + - Nan::Null() + - Nan::True() + - Nan::False() + - Nan::EmptyString() + + + +### Nan::New() + +`Nan::New()` should be used to instantiate new JavaScript objects. + +Refer to the specific V8 type in the [V8 documentation](https://v8docs.nodesource.com/node-8.11/d1/d83/classv8_1_1_data.html) for information on the types of arguments required for instantiation. + +Signatures: + +Return types are mostly omitted from the signatures for simplicity. In most cases the type will be contained within a `v8::Local`. The following types will be contained within a `Nan::MaybeLocal`: `v8::String`, `v8::Date`, `v8::RegExp`, `v8::Script`, `v8::UnboundScript`. 
+ +Empty objects: + +```c++ +Nan::New(); +``` + +Generic single and multiple-argument: + +```c++ +Nan::New(A0 arg0); +Nan::New(A0 arg0, A1 arg1); +Nan::New(A0 arg0, A1 arg1, A2 arg2); +Nan::New(A0 arg0, A1 arg1, A2 arg2, A3 arg3); +``` + +For creating `v8::FunctionTemplate` and `v8::Function` objects: + +_The definition of `Nan::FunctionCallback` can be found in the [Method declaration](./methods.md#api_nan_method) documentation._ + +```c++ +Nan::New(Nan::FunctionCallback callback, + v8::Local data = v8::Local()); +Nan::New(Nan::FunctionCallback callback, + v8::Local data = v8::Local(), + A2 a2 = A2()); +``` + +Native number types: + +```c++ +v8::Local Nan::New(bool value); +v8::Local Nan::New(int32_t value); +v8::Local Nan::New(uint32_t value); +v8::Local Nan::New(double value); +``` + +String types: + +```c++ +Nan::MaybeLocal Nan::New(std::string const& value); +Nan::MaybeLocal Nan::New(const char * value, int length); +Nan::MaybeLocal Nan::New(const char * value); +Nan::MaybeLocal Nan::New(const uint16_t * value); +Nan::MaybeLocal Nan::New(const uint16_t * value, int length); +``` + +Specialized types: + +```c++ +v8::Local Nan::New(v8::String::ExternalStringResource * value); +v8::Local Nan::New(Nan::ExternalOneByteStringResource * value); +v8::Local Nan::New(v8::Local pattern, v8::RegExp::Flags flags); +``` + +Note that `Nan::ExternalOneByteStringResource` maps to [`v8::String::ExternalOneByteStringResource`](https://v8docs.nodesource.com/node-8.11/d9/db3/classv8_1_1_string_1_1_external_one_byte_string_resource.html), and `v8::String::ExternalAsciiStringResource` in older versions of V8. + + + +### Nan::Undefined() + +A helper method to reference the `v8::Undefined` object in a way that is compatible across all supported versions of V8. + +Signature: + +```c++ +v8::Local Nan::Undefined() +``` + + +### Nan::Null() + +A helper method to reference the `v8::Null` object in a way that is compatible across all supported versions of V8. + +Signature: + +```c++ +v8::Local Nan::Null() +``` + + +### Nan::True() + +A helper method to reference the `v8::Boolean` object representing the `true` value in a way that is compatible across all supported versions of V8. + +Signature: + +```c++ +v8::Local Nan::True() +``` + + +### Nan::False() + +A helper method to reference the `v8::Boolean` object representing the `false` value in a way that is compatible across all supported versions of V8. + +Signature: + +```c++ +v8::Local Nan::False() +``` + + +### Nan::EmptyString() + +Call [`v8::String::Empty`](https://v8docs.nodesource.com/node-8.11/d2/db3/classv8_1_1_string.html#a7c1bc8886115d7ee46f1d571dd6ebc6d) to reference the empty string in a way that is compatible across all supported versions of V8. + +Signature: + +```c++ +v8::Local Nan::EmptyString() +``` + + + +### Nan::NewOneByteString() + +An implementation of [`v8::String::NewFromOneByte()`](https://v8docs.nodesource.com/node-8.11/d2/db3/classv8_1_1_string.html#a5264d50b96d2c896ce525a734dc10f09) provided for consistent availability and API across supported versions of V8. Allocates a new string from Latin-1 data. 
+ +Signature: + +```c++ +Nan::MaybeLocal Nan::NewOneByteString(const uint8_t * value, + int length = -1) +``` diff --git a/node_modules/nan/doc/node_misc.md b/node_modules/nan/doc/node_misc.md new file mode 100644 index 00000000..17578e34 --- /dev/null +++ b/node_modules/nan/doc/node_misc.md @@ -0,0 +1,123 @@ +## Miscellaneous Node Helpers + + - Nan::AsyncResource + - Nan::MakeCallback() + - NAN_MODULE_INIT() + - Nan::Export() + + +### Nan::AsyncResource + +This class is analogous to the `AsyncResource` JavaScript class exposed by Node's [async_hooks][] API. + +When calling back into JavaScript asynchronously, special care must be taken to ensure that the runtime can properly track +async hops. `Nan::AsyncResource` is a class that provides an RAII wrapper around `node::EmitAsyncInit`, `node::EmitAsyncDestroy`, +and `node::MakeCallback`. Using this mechanism to call back into JavaScript, as opposed to `Nan::MakeCallback` or +`v8::Function::Call` ensures that the callback is executed in the correct async context. This ensures that async mechanisms +such as domains and [async_hooks][] function correctly. + +Definition: + +```c++ +class AsyncResource { + public: + AsyncResource(v8::Local name, + v8::Local resource = New()); + AsyncResource(const char* name, + v8::Local resource = New()); + ~AsyncResource(); + + v8::MaybeLocal runInAsyncScope(v8::Local target, + v8::Local func, + int argc, + v8::Local* argv); + v8::MaybeLocal runInAsyncScope(v8::Local target, + v8::Local symbol, + int argc, + v8::Local* argv); + v8::MaybeLocal runInAsyncScope(v8::Local target, + const char* method, + int argc, + v8::Local* argv); +}; +``` + +* `name`: Identifier for the kind of resource that is being provided for diagnostics information exposed by the [async_hooks][] + API. This will be passed to the possible `init` hook as the `type`. To avoid name collisions with other modules we recommend + that the name include the name of the owning module as a prefix. For example `mysql` module could use something like + `mysql:batch-db-query-resource`. +* `resource`: An optional object associated with the async work that will be passed to the possible [async_hooks][] + `init` hook. If this parameter is omitted, or an empty handle is provided, this object will be created automatically. +* When calling JS on behalf of this resource, one can use `runInAsyncScope`. This will ensure that the callback runs in the + correct async execution context. +* `AsyncDestroy` is automatically called when an AsyncResource object is destroyed. + +For more details, see the Node [async_hooks][] documentation. You might also want to take a look at the documentation for the +[N-API counterpart][napi]. For example usage, see the `asyncresource.cpp` example in the `test/cpp` directory. + + +### Nan::MakeCallback() + +Deprecated wrappers around the legacy `node::MakeCallback()` APIs. Node.js 10+ +has deprecated these legacy APIs as they do not provide a mechanism to preserve +async context. + +We recommend that you use the `AsyncResource` class and `AsyncResource::runInAsyncScope` instead of using `Nan::MakeCallback` or +`v8::Function#Call()` directly. `AsyncResource` properly takes care of running the callback in the correct async execution +context – something that is essential for functionality like domains, async_hooks and async debugging. 
+ +Signatures: + +```c++ +NAN_DEPRECATED +v8::Local Nan::MakeCallback(v8::Local target, + v8::Local func, + int argc, + v8::Local* argv); +NAN_DEPRECATED +v8::Local Nan::MakeCallback(v8::Local target, + v8::Local symbol, + int argc, + v8::Local* argv); +NAN_DEPRECATED +v8::Local Nan::MakeCallback(v8::Local target, + const char* method, + int argc, + v8::Local* argv); +``` + + + +### NAN_MODULE_INIT() + +Used to define the entry point function to a Node add-on. Creates a function with a given `name` that receives a `target` object representing the equivalent of the JavaScript `exports` object. + +See example below. + + +### Nan::Export() + +A simple helper to register a `v8::FunctionTemplate` from a JavaScript-accessible method (see [Methods](./methods.md)) as a property on an object. Can be used in a way similar to assigning properties to `module.exports` in JavaScript. + +Signature: + +```c++ +void Export(v8::Local target, const char *name, Nan::FunctionCallback f) +``` + +Also available as the shortcut `NAN_EXPORT` macro. + +Example: + +```c++ +NAN_METHOD(Foo) { + ... +} + +NAN_MODULE_INIT(Init) { + NAN_EXPORT(target, Foo); +} +``` + +[async_hooks]: https://nodejs.org/dist/latest-v9.x/docs/api/async_hooks.html +[napi]: https://nodejs.org/dist/latest-v9.x/docs/api/n-api.html#n_api_custom_asynchronous_operations diff --git a/node_modules/nan/doc/object_wrappers.md b/node_modules/nan/doc/object_wrappers.md new file mode 100644 index 00000000..08dd6b55 --- /dev/null +++ b/node_modules/nan/doc/object_wrappers.md @@ -0,0 +1,263 @@ +## Object Wrappers + +The `ObjectWrap` class can be used to make wrapped C++ objects and a factory of wrapped objects. + + - Nan::ObjectWrap + + + +### Nan::ObjectWrap() + +A reimplementation of `node::ObjectWrap` that adds some API not present in older versions of Node. Should be preferred over `node::ObjectWrap` in all cases for consistency. + +Definition: + +```c++ +class ObjectWrap { + public: + ObjectWrap(); + + virtual ~ObjectWrap(); + + template + static inline T* Unwrap(v8::Local handle); + + inline v8::Local handle(); + + inline Nan::Persistent& persistent(); + + protected: + inline void Wrap(v8::Local handle); + + inline void MakeWeak(); + + /* Ref() marks the object as being attached to an event loop. + * Refed objects will not be garbage collected, even if + * all references are lost. + */ + virtual void Ref(); + + /* Unref() marks an object as detached from the event loop. This is its + * default state. When an object with a "weak" reference changes from + * attached to detached state it will be freed. Be careful not to access + * the object after making this call as it might be gone! + * (A "weak reference" means an object that only has a + * persistant handle.) + * + * DO NOT CALL THIS FROM DESTRUCTOR + */ + virtual void Unref(); + + int refs_; // ro +}; +``` + +See the Node documentation on [Wrapping C++ Objects](https://nodejs.org/api/addons.html#addons_wrapping_c_objects) for more details. + +### This vs. Holder + +When calling `Unwrap`, it is important that the argument is indeed some JavaScript object which got wrapped by a `Wrap` call for this class or any derived class. +The `Signature` installed by [`Nan::SetPrototypeMethod()`](methods.md#api_nan_set_prototype_method) does ensure that `info.Holder()` is just such an instance. +In Node 0.12 and later, `info.This()` will also be of such a type, since otherwise the invocation will get rejected. 
+However, in Node 0.10 and before it was possible to invoke a method on a JavaScript object which just had the extension type in its prototype chain. +In such a situation, calling `Unwrap` on `info.This()` will likely lead to a failed assertion causing a crash, but could lead to even more serious corruption. + +On the other hand, calling `Unwrap` in an [accessor](methods.md#api_nan_set_accessor) should not use `Holder()` if the accessor is defined on the prototype. +So either define your accessors on the instance template, +or use `This()` after verifying that it is indeed a valid object. + +### Examples + +#### Basic + +```c++ +class MyObject : public Nan::ObjectWrap { + public: + static NAN_MODULE_INIT(Init) { + v8::Local tpl = Nan::New(New); + tpl->SetClassName(Nan::New("MyObject").ToLocalChecked()); + tpl->InstanceTemplate()->SetInternalFieldCount(1); + + Nan::SetPrototypeMethod(tpl, "getHandle", GetHandle); + Nan::SetPrototypeMethod(tpl, "getValue", GetValue); + + constructor().Reset(Nan::GetFunction(tpl).ToLocalChecked()); + Nan::Set(target, Nan::New("MyObject").ToLocalChecked(), + Nan::GetFunction(tpl).ToLocalChecked()); + } + + private: + explicit MyObject(double value = 0) : value_(value) {} + ~MyObject() {} + + static NAN_METHOD(New) { + if (info.IsConstructCall()) { + double value = info[0]->IsUndefined() ? 0 : Nan::To(info[0]).FromJust(); + MyObject *obj = new MyObject(value); + obj->Wrap(info.This()); + info.GetReturnValue().Set(info.This()); + } else { + const int argc = 1; + v8::Local argv[argc] = {info[0]}; + v8::Local cons = Nan::New(constructor()); + info.GetReturnValue().Set(Nan::NewInstance(cons, argc, argv).ToLocalChecked()); + } + } + + static NAN_METHOD(GetHandle) { + MyObject* obj = Nan::ObjectWrap::Unwrap(info.Holder()); + info.GetReturnValue().Set(obj->handle()); + } + + static NAN_METHOD(GetValue) { + MyObject* obj = Nan::ObjectWrap::Unwrap(info.Holder()); + info.GetReturnValue().Set(obj->value_); + } + + static inline Nan::Persistent & constructor() { + static Nan::Persistent my_constructor; + return my_constructor; + } + + double value_; +}; + +NODE_MODULE(objectwrapper, MyObject::Init) +``` + +To use in Javascript: + +```Javascript +var objectwrapper = require('bindings')('objectwrapper'); + +var obj = new objectwrapper.MyObject(5); +console.log('Should be 5: ' + obj.getValue()); +``` + +#### Factory of wrapped objects + +```c++ +class MyFactoryObject : public Nan::ObjectWrap { + public: + static NAN_MODULE_INIT(Init) { + v8::Local tpl = Nan::New(New); + tpl->InstanceTemplate()->SetInternalFieldCount(1); + + Nan::SetPrototypeMethod(tpl, "getValue", GetValue); + + constructor().Reset(Nan::GetFunction(tpl).ToLocalChecked()); + } + + static NAN_METHOD(NewInstance) { + v8::Local cons = Nan::New(constructor()); + double value = info[0]->IsNumber() ? Nan::To(info[0]).FromJust() : 0; + const int argc = 1; + v8::Local argv[1] = {Nan::New(value)}; + info.GetReturnValue().Set(Nan::NewInstance(cons, argc, argv).ToLocalChecked()); + } + + // Needed for the next example: + inline double value() const { + return value_; + } + + private: + explicit MyFactoryObject(double value = 0) : value_(value) {} + ~MyFactoryObject() {} + + static NAN_METHOD(New) { + if (info.IsConstructCall()) { + double value = info[0]->IsNumber() ? 
Nan::To(info[0]).FromJust() : 0; + MyFactoryObject * obj = new MyFactoryObject(value); + obj->Wrap(info.This()); + info.GetReturnValue().Set(info.This()); + } else { + const int argc = 1; + v8::Local argv[argc] = {info[0]}; + v8::Local cons = Nan::New(constructor()); + info.GetReturnValue().Set(Nan::NewInstance(cons, argc, argv).ToLocalChecked()); + } + } + + static NAN_METHOD(GetValue) { + MyFactoryObject* obj = ObjectWrap::Unwrap(info.Holder()); + info.GetReturnValue().Set(obj->value_); + } + + static inline Nan::Persistent & constructor() { + static Nan::Persistent my_constructor; + return my_constructor; + } + + double value_; +}; + +NAN_MODULE_INIT(Init) { + MyFactoryObject::Init(target); + Nan::Set(target, + Nan::New("newFactoryObjectInstance").ToLocalChecked(), + Nan::GetFunction( + Nan::New(MyFactoryObject::NewInstance)).ToLocalChecked() + ); +} + +NODE_MODULE(wrappedobjectfactory, Init) +``` + +To use in Javascript: + +```Javascript +var wrappedobjectfactory = require('bindings')('wrappedobjectfactory'); + +var obj = wrappedobjectfactory.newFactoryObjectInstance(10); +console.log('Should be 10: ' + obj.getValue()); +``` + +#### Passing wrapped objects around + +Use the `MyFactoryObject` class above along with the following: + +```c++ +static NAN_METHOD(Sum) { + Nan::MaybeLocal maybe1 = Nan::To(info[0]); + Nan::MaybeLocal maybe2 = Nan::To(info[1]); + + // Quick check: + if (maybe1.IsEmpty() || maybe2.IsEmpty()) { + // return value is undefined by default + return; + } + + MyFactoryObject* obj1 = + Nan::ObjectWrap::Unwrap(maybe1.ToLocalChecked()); + MyFactoryObject* obj2 = + Nan::ObjectWrap::Unwrap(maybe2.ToLocalChecked()); + + info.GetReturnValue().Set(Nan::New(obj1->value() + obj2->value())); +} + +NAN_MODULE_INIT(Init) { + MyFactoryObject::Init(target); + Nan::Set(target, + Nan::New("newFactoryObjectInstance").ToLocalChecked(), + Nan::GetFunction( + Nan::New(MyFactoryObject::NewInstance)).ToLocalChecked() + ); + Nan::Set(target, + Nan::New("sum").ToLocalChecked(), + Nan::GetFunction(Nan::New(Sum)).ToLocalChecked() + ); +} + +NODE_MODULE(myaddon, Init) +``` + +To use in Javascript: + +```Javascript +var myaddon = require('bindings')('myaddon'); + +var obj1 = myaddon.newFactoryObjectInstance(5); +var obj2 = myaddon.newFactoryObjectInstance(10); +console.log('sum of object values: ' + myaddon.sum(obj1, obj2)); +``` diff --git a/node_modules/nan/doc/persistent.md b/node_modules/nan/doc/persistent.md new file mode 100644 index 00000000..bec9c3f3 --- /dev/null +++ b/node_modules/nan/doc/persistent.md @@ -0,0 +1,296 @@ +## Persistent references + +An object reference that is independent of any `HandleScope` is a _persistent_ reference. Where a `Local` handle only lives as long as the `HandleScope` in which it was allocated, a `Persistent` handle remains valid until it is explicitly disposed. + +Due to the evolution of the V8 API, it is necessary for NAN to provide a wrapper implementation of the `Persistent` classes to supply compatibility across the V8 versions supported. + + - Nan::PersistentBase & v8::PersistentBase + - Nan::NonCopyablePersistentTraits & v8::NonCopyablePersistentTraits + - Nan::CopyablePersistentTraits & v8::CopyablePersistentTraits + - Nan::Persistent + - Nan::Global + - Nan::WeakCallbackInfo + - Nan::WeakCallbackType + +Also see the V8 Embedders Guide section on [Handles and Garbage Collection](https://developers.google.com/v8/embed#handles). 
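+
+Before the individual classes are described in detail, here is a minimal sketch of the typical lifecycle: construct a `Nan::Persistent` from a `v8::Local`, recover a `v8::Local` from it later via `Nan::New()`, and dispose of it with `Reset()`. The function name below is hypothetical:
+
+```c++
+// Illustrative sketch of the Persistent lifecycle.
+void PersistentLifecycleSketch() {
+  Nan::HandleScope scope;
+
+  // A Local only lives as long as this HandleScope...
+  v8::Local<v8::Object> obj = Nan::New<v8::Object>();
+
+  // ...so keep a Persistent when the reference must outlive it.
+  Nan::Persistent<v8::Object> persistent(obj);
+
+  // Later, possibly in another scope: get a Local back from the Persistent.
+  v8::Local<v8::Object> again = Nan::New(persistent);
+  (void) again;
+
+  // Explicitly dispose of the storage cell when it is no longer needed.
+  persistent.Reset();
+}
+```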
+ + +### Nan::PersistentBase & v8::PersistentBase + +A persistent handle contains a reference to a storage cell in V8 which holds an object value and which is updated by the garbage collector whenever the object is moved. A new storage cell can be created using the constructor or `Nan::PersistentBase::Reset()`. Existing handles can be disposed using an argument-less `Nan::PersistentBase::Reset()`. + +Definition: + +_(note: this is implemented as `Nan::PersistentBase` for older versions of V8 and the native `v8::PersistentBase` is used for newer versions of V8)_ + +```c++ +template class PersistentBase { + public: + /** + * If non-empty, destroy the underlying storage cell + */ + void Reset(); + + /** + * If non-empty, destroy the underlying storage cell and create a new one with + * the contents of another if it is also non-empty + */ + template void Reset(const v8::Local &other); + + /** + * If non-empty, destroy the underlying storage cell and create a new one with + * the contents of another if it is also non-empty + */ + template void Reset(const PersistentBase &other); + + /** Returns true if the handle is empty. */ + bool IsEmpty() const; + + /** + * If non-empty, destroy the underlying storage cell + * IsEmpty() will return true after this call. + */ + void Empty(); + + template bool operator==(const PersistentBase &that); + + template bool operator==(const v8::Local &that); + + template bool operator!=(const PersistentBase &that); + + template bool operator!=(const v8::Local &that); + + /** + * Install a finalization callback on this object. + * NOTE: There is no guarantee as to *when* or even *if* the callback is + * invoked. The invocation is performed solely on a best effort basis. + * As always, GC-based finalization should *not* be relied upon for any + * critical form of resource management! At the moment you can either + * specify a parameter for the callback or the location of two internal + * fields in the dying object. + */ + template + void SetWeak(P *parameter, + typename WeakCallbackInfo
::Callback callback, + WeakCallbackType type); + + void ClearWeak(); + + /** + * Marks the reference to this object independent. Garbage collector is free + * to ignore any object groups containing this object. Weak callback for an + * independent handle should not assume that it will be preceded by a global + * GC prologue callback or followed by a global GC epilogue callback. + */ + void MarkIndependent() const; + + bool IsIndependent() const; + + /** Checks if the handle holds the only reference to an object. */ + bool IsNearDeath() const; + + /** Returns true if the handle's reference is weak. */ + bool IsWeak() const +}; +``` + +See the V8 documentation for [`PersistentBase`](https://v8docs.nodesource.com/node-8.11/d4/dca/classv8_1_1_persistent_base.html) for further information. + +**Tip:** To get a `v8::Local` reference to the original object back from a `PersistentBase` or `Persistent` object: + +```c++ +v8::Local object = Nan::New(persistent); +``` + + +### Nan::NonCopyablePersistentTraits & v8::NonCopyablePersistentTraits + +Default traits for `Nan::Persistent`. This class does not allow use of the a copy constructor or assignment operator. At present `kResetInDestructor` is not set, but that will change in a future version. + +Definition: + +_(note: this is implemented as `Nan::NonCopyablePersistentTraits` for older versions of V8 and the native `v8::NonCopyablePersistentTraits` is used for newer versions of V8)_ + +```c++ +template class NonCopyablePersistentTraits { + public: + typedef Persistent > NonCopyablePersistent; + + static const bool kResetInDestructor = false; + + template + static void Copy(const Persistent &source, + NonCopyablePersistent *dest); + + template static void Uncompilable(); +}; +``` + +See the V8 documentation for [`NonCopyablePersistentTraits`](https://v8docs.nodesource.com/node-8.11/de/d73/classv8_1_1_non_copyable_persistent_traits.html) for further information. + + +### Nan::CopyablePersistentTraits & v8::CopyablePersistentTraits + +A helper class of traits to allow copying and assignment of `Persistent`. This will clone the contents of storage cell, but not any of the flags, etc.. + +Definition: + +_(note: this is implemented as `Nan::CopyablePersistentTraits` for older versions of V8 and the native `v8::NonCopyablePersistentTraits` is used for newer versions of V8)_ + +```c++ +template +class CopyablePersistentTraits { + public: + typedef Persistent > CopyablePersistent; + + static const bool kResetInDestructor = true; + + template + static void Copy(const Persistent &source, + CopyablePersistent *dest); +}; +``` + +See the V8 documentation for [`CopyablePersistentTraits`](https://v8docs.nodesource.com/node-8.11/da/d5c/structv8_1_1_copyable_persistent_traits.html) for further information. + + +### Nan::Persistent + +A type of `PersistentBase` which allows copy and assignment. Copy, assignment and destructor behavior is controlled by the traits class `M`. + +Definition: + +```c++ +template > +class Persistent; + +template class Persistent : public PersistentBase { + public: + /** + * A Persistent with no storage cell. + */ + Persistent(); + + /** + * Construct a Persistent from a v8::Local. When the v8::Local is non-empty, a + * new storage cell is created pointing to the same object, and no flags are + * set. + */ + template Persistent(v8::Local that); + + /** + * Construct a Persistent from a Persistent. When the Persistent is non-empty, + * a new storage cell is created pointing to the same object, and no flags are + * set. 
+ */ + Persistent(const Persistent &that); + + /** + * The copy constructors and assignment operator create a Persistent exactly + * as the Persistent constructor, but the Copy function from the traits class + * is called, allowing the setting of flags based on the copied Persistent. + */ + Persistent &operator=(const Persistent &that); + + template + Persistent &operator=(const Persistent &that); + + /** + * The destructor will dispose the Persistent based on the kResetInDestructor + * flags in the traits class. Since not calling dispose can result in a + * memory leak, it is recommended to always set this flag. + */ + ~Persistent(); +}; +``` + +See the V8 documentation for [`Persistent`](https://v8docs.nodesource.com/node-8.11/d2/d78/classv8_1_1_persistent.html) for further information. + + +### Nan::Global + +A type of `PersistentBase` which has move semantics. + +```c++ +template class Global : public PersistentBase { + public: + /** + * A Global with no storage cell. + */ + Global(); + + /** + * Construct a Global from a v8::Local. When the v8::Local is non-empty, a new + * storage cell is created pointing to the same object, and no flags are set. + */ + template Global(v8::Local that); + /** + * Construct a Global from a PersistentBase. When the Persistent is non-empty, + * a new storage cell is created pointing to the same object, and no flags are + * set. + */ + template Global(const PersistentBase &that); + + /** + * Pass allows returning globals from functions, etc. + */ + Global Pass(); +}; +``` + +See the V8 documentation for [`Global`](https://v8docs.nodesource.com/node-8.11/d5/d40/classv8_1_1_global.html) for further information. + + +### Nan::WeakCallbackInfo + +`Nan::WeakCallbackInfo` is used as an argument when setting a persistent reference as weak. You may need to free any external resources attached to the object. It is a mirror of `v8:WeakCallbackInfo` as found in newer versions of V8. + +Definition: + +```c++ +template class WeakCallbackInfo { + public: + typedef void (*Callback)(const WeakCallbackInfo& data); + + v8::Isolate *GetIsolate() const; + + /** + * Get the parameter that was associated with the weak handle. + */ + T *GetParameter() const; + + /** + * Get pointer from internal field, index can be 0 or 1. + */ + void *GetInternalField(int index) const; +}; +``` + +Example usage: + +```c++ +void weakCallback(const WeakCallbackInfo &data) { + int *parameter = data.GetParameter(); + delete parameter; +} + +Persistent obj; +int *data = new int(0); +obj.SetWeak(data, callback, WeakCallbackType::kParameter); +``` + +See the V8 documentation for [`WeakCallbackInfo`](https://v8docs.nodesource.com/node-8.11/d8/d06/classv8_1_1_weak_callback_info.html) for further information. + + +### Nan::WeakCallbackType + +Represents the type of a weak callback. +A weak callback of type `kParameter` makes the supplied parameter to `Nan::PersistentBase::SetWeak` available through `WeakCallbackInfo::GetParameter`. +A weak callback of type `kInternalFields` uses up to two internal fields at indices 0 and 1 on the `Nan::PersistentBase` being made weak. +Note that only `v8::Object`s and derivatives can have internal fields. + +Definition: + +```c++ +enum class WeakCallbackType { kParameter, kInternalFields }; +``` diff --git a/node_modules/nan/doc/scopes.md b/node_modules/nan/doc/scopes.md new file mode 100644 index 00000000..27ab8630 --- /dev/null +++ b/node_modules/nan/doc/scopes.md @@ -0,0 +1,73 @@ +## Scopes + +A _local handle_ is a pointer to an object. 
All V8 objects are accessed using handles, they are necessary because of the way the V8 garbage collector works. + +A handle scope can be thought of as a container for any number of handles. When you've finished with your handles, instead of deleting each one individually you can simply delete their scope. + +The creation of `HandleScope` objects is different across the supported versions of V8. Therefore, NAN provides its own implementations that can be used safely across these. + + - Nan::HandleScope + - Nan::EscapableHandleScope + +Also see the V8 Embedders Guide section on [Handles and Garbage Collection](https://github.com/v8/v8/wiki/Embedder%27s%20Guide#handles-and-garbage-collection). + + +### Nan::HandleScope + +A simple wrapper around [`v8::HandleScope`](https://v8docs.nodesource.com/node-8.11/d3/d95/classv8_1_1_handle_scope.html). + +Definition: + +```c++ +class Nan::HandleScope { + public: + Nan::HandleScope(); + static int NumberOfHandles(); +}; +``` + +Allocate a new `Nan::HandleScope` whenever you are creating new V8 JavaScript objects. Note that an implicit `HandleScope` is created for you on JavaScript-accessible methods so you do not need to insert one yourself. + +Example: + +```c++ +// new object is created, it needs a new scope: +void Pointless() { + Nan::HandleScope scope; + v8::Local obj = Nan::New(); +} + +// JavaScript-accessible method already has a HandleScope +NAN_METHOD(Pointless2) { + v8::Local obj = Nan::New(); +} +``` + + +### Nan::EscapableHandleScope + +Similar to [`Nan::HandleScope`](#api_nan_handle_scope) but should be used in cases where a function needs to return a V8 JavaScript type that has been created within it. + +Definition: + +```c++ +class Nan::EscapableHandleScope { + public: + Nan::EscapableHandleScope(); + static int NumberOfHandles(); + template v8::Local Escape(v8::Local value); +} +``` + +Use `Escape(value)` to return the object. + +Example: + +```c++ +v8::Local EmptyObj() { + Nan::EscapableHandleScope scope; + v8::Local obj = Nan::New(); + return scope.Escape(obj); +} +``` + diff --git a/node_modules/nan/doc/script.md b/node_modules/nan/doc/script.md new file mode 100644 index 00000000..945398f0 --- /dev/null +++ b/node_modules/nan/doc/script.md @@ -0,0 +1,38 @@ +## Script + +NAN provides a `v8::Script` helpers as the API has changed over the supported versions of V8. + + - Nan::CompileScript() + - Nan::RunScript() + + + +### Nan::CompileScript() + +A wrapper around [`v8::ScriptCompiler::Compile()`](https://v8docs.nodesource.com/node-8.11/da/da5/classv8_1_1_script_compiler.html#a93f5072a0db55d881b969e9fc98e564b). + +Note that `Nan::BoundScript` is an alias for `v8::Script`. + +Signature: + +```c++ +Nan::MaybeLocal Nan::CompileScript( + v8::Local s, + const v8::ScriptOrigin& origin); +Nan::MaybeLocal Nan::CompileScript(v8::Local s); +``` + + + +### Nan::RunScript() + +Calls `script->Run()` or `script->BindToCurrentContext()->Run(Nan::GetCurrentContext())`. + +Note that `Nan::BoundScript` is an alias for `v8::Script` and `Nan::UnboundScript` is an alias for `v8::UnboundScript` where available and `v8::Script` on older versions of V8. 
+ +Signature: + +```c++ +Nan::MaybeLocal Nan::RunScript(v8::Local script) +Nan::MaybeLocal Nan::RunScript(v8::Local script) +``` diff --git a/node_modules/nan/doc/string_bytes.md b/node_modules/nan/doc/string_bytes.md new file mode 100644 index 00000000..7c1bd325 --- /dev/null +++ b/node_modules/nan/doc/string_bytes.md @@ -0,0 +1,62 @@ +## Strings & Bytes + +Miscellaneous string & byte encoding and decoding functionality provided for compatibility across supported versions of V8 and Node. Implemented by NAN to ensure that all encoding types are supported, even for older versions of Node where they are missing. + + - Nan::Encoding + - Nan::Encode() + - Nan::DecodeBytes() + - Nan::DecodeWrite() + + + +### Nan::Encoding + +An enum representing the supported encoding types. A copy of `node::encoding` that is consistent across versions of Node. + +Definition: + +```c++ +enum Nan::Encoding { ASCII, UTF8, BASE64, UCS2, BINARY, HEX, BUFFER } +``` + + + +### Nan::Encode() + +A wrapper around `node::Encode()` that provides a consistent implementation across supported versions of Node. + +Signature: + +```c++ +v8::Local Nan::Encode(const void *buf, + size_t len, + enum Nan::Encoding encoding = BINARY); +``` + + + +### Nan::DecodeBytes() + +A wrapper around `node::DecodeBytes()` that provides a consistent implementation across supported versions of Node. + +Signature: + +```c++ +ssize_t Nan::DecodeBytes(v8::Local val, + enum Nan::Encoding encoding = BINARY); +``` + + + +### Nan::DecodeWrite() + +A wrapper around `node::DecodeWrite()` that provides a consistent implementation across supported versions of Node. + +Signature: + +```c++ +ssize_t Nan::DecodeWrite(char *buf, + size_t len, + v8::Local val, + enum Nan::Encoding encoding = BINARY); +``` diff --git a/node_modules/nan/doc/v8_internals.md b/node_modules/nan/doc/v8_internals.md new file mode 100644 index 00000000..88bd2deb --- /dev/null +++ b/node_modules/nan/doc/v8_internals.md @@ -0,0 +1,199 @@ +## V8 internals + +The hooks to access V8 internals—including GC and statistics—are different across the supported versions of V8, therefore NAN provides its own hooks that call the appropriate V8 methods. + + - NAN_GC_CALLBACK() + - Nan::AddGCEpilogueCallback() + - Nan::RemoveGCEpilogueCallback() + - Nan::AddGCPrologueCallback() + - Nan::RemoveGCPrologueCallback() + - Nan::GetHeapStatistics() + - Nan::SetCounterFunction() + - Nan::SetCreateHistogramFunction() + - Nan::SetAddHistogramSampleFunction() + - Nan::IdleNotification() + - Nan::LowMemoryNotification() + - Nan::ContextDisposedNotification() + - Nan::GetInternalFieldPointer() + - Nan::SetInternalFieldPointer() + - Nan::AdjustExternalMemory() + + + +### NAN_GC_CALLBACK(callbackname) + +Use `NAN_GC_CALLBACK` to declare your callbacks for `Nan::AddGCPrologueCallback()` and `Nan::AddGCEpilogueCallback()`. Your new method receives the arguments `v8::GCType type` and `v8::GCCallbackFlags flags`. 
+ +```c++ +static Nan::Persistent callback; + +NAN_GC_CALLBACK(gcPrologueCallback) { + v8::Local argv[] = { Nan::New("prologue").ToLocalChecked() }; + Nan::MakeCallback(Nan::GetCurrentContext()->Global(), Nan::New(callback), 1, argv); +} + +NAN_METHOD(Hook) { + callback.Reset(To(args[0]).ToLocalChecked()); + Nan::AddGCPrologueCallback(gcPrologueCallback); + info.GetReturnValue().Set(info.Holder()); +} +``` + + +### Nan::AddGCEpilogueCallback() + +Signature: + +```c++ +void Nan::AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback, v8::GCType gc_type_filter = v8::kGCTypeAll) +``` + +Calls V8's [`AddGCEpilogueCallback()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#a580f976e4290cead62c2fc4dd396be3e). + + +### Nan::RemoveGCEpilogueCallback() + +Signature: + +```c++ +void Nan::RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback) +``` + +Calls V8's [`RemoveGCEpilogueCallback()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#adca9294555a3908e9f23c7bb0f0f284c). + + +### Nan::AddGCPrologueCallback() + +Signature: + +```c++ +void Nan::AddGCPrologueCallback(v8::Isolate::GCPrologueCallback, v8::GCType gc_type_filter callback) +``` + +Calls V8's [`AddGCPrologueCallback()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#a6dbef303603ebdb03da6998794ea05b8). + + +### Nan::RemoveGCPrologueCallback() + +Signature: + +```c++ +void Nan::RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback) +``` + +Calls V8's [`RemoveGCPrologueCallback()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#a5f72c7cda21415ce062bbe5c58abe09e). + + +### Nan::GetHeapStatistics() + +Signature: + +```c++ +void Nan::GetHeapStatistics(v8::HeapStatistics *heap_statistics) +``` + +Calls V8's [`GetHeapStatistics()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#a5593ac74687b713095c38987e5950b34). + + +### Nan::SetCounterFunction() + +Signature: + +```c++ +void Nan::SetCounterFunction(v8::CounterLookupCallback cb) +``` + +Calls V8's [`SetCounterFunction()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#a045d7754e62fa0ec72ae6c259b29af94). + + +### Nan::SetCreateHistogramFunction() + +Signature: + +```c++ +void Nan::SetCreateHistogramFunction(v8::CreateHistogramCallback cb) +``` + +Calls V8's [`SetCreateHistogramFunction()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#a542d67e85089cb3f92aadf032f99e732). + + +### Nan::SetAddHistogramSampleFunction() + +Signature: + +```c++ +void Nan::SetAddHistogramSampleFunction(v8::AddHistogramSampleCallback cb) +``` + +Calls V8's [`SetAddHistogramSampleFunction()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#aeb420b690bc2c216882d6fdd00ddd3ea). + + +### Nan::IdleNotification() + +Signature: + +```c++ +bool Nan::IdleNotification(int idle_time_in_ms) +``` + +Calls V8's [`IdleNotification()` or `IdleNotificationDeadline()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#ad6a2a02657f5425ad460060652a5a118) depending on V8 version. + + +### Nan::LowMemoryNotification() + +Signature: + +```c++ +void Nan::LowMemoryNotification() +``` + +Calls V8's [`LowMemoryNotification()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#a24647f61d6b41f69668094bdcd6ea91f). 
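+
+A minimal sketch of how these notifications are commonly combined, for example after releasing a large block of native memory, is shown below. The function name and the 100 ms idle hint are illustrative only:
+
+```c++
+// Illustrative only: nudge V8 after a large native buffer has been freed.
+void AfterLargeNativeFree() {
+  // Offer V8 some idle time (hint in milliseconds); the call conventionally
+  // returns true once V8 has no further garbage-collection work to do.
+  if (!Nan::IdleNotification(100)) {
+    // Work still pending: signal memory pressure so V8 runs a full GC.
+    Nan::LowMemoryNotification();
+  }
+}
+```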
+ + +### Nan::ContextDisposedNotification() + +Signature: + +```c++ +void Nan::ContextDisposedNotification() +``` + +Calls V8's [`ContextDisposedNotification()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#ad7f5dc559866343fe6cd8db1f134d48b). + + +### Nan::GetInternalFieldPointer() + +Gets a pointer to the internal field at `index` from a V8 `Object` handle. + +Signature: + +```c++ +void* Nan::GetInternalFieldPointer(v8::Local<v8::Object> object, int index) +``` + +Calls the Object's [`GetAlignedPointerFromInternalField()` or `GetPointerFromInternalField()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#a580ea84afb26c005d6762eeb9e3c308f) depending on the version of V8. + + +### Nan::SetInternalFieldPointer() + +Sets the value of the internal field at `index` on a V8 `Object` handle. + +Signature: + +```c++ +void Nan::SetInternalFieldPointer(v8::Local<v8::Object> object, int index, void* value) +``` + +Calls the Object's [`SetAlignedPointerInInternalField()` or `SetPointerInInternalField()`](https://v8docs.nodesource.com/node-8.11/db/d85/classv8_1_1_object.html#ab3c57184263cf29963ef0017bec82281) depending on the version of V8. + + +### Nan::AdjustExternalMemory() + +Signature: + +```c++ +int Nan::AdjustExternalMemory(int bytesChange) +``` + +Calls V8's [`AdjustAmountOfExternalAllocatedMemory()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#ae1a59cac60409d3922582c4af675473e). + diff --git a/node_modules/nan/doc/v8_misc.md b/node_modules/nan/doc/v8_misc.md new file mode 100644 index 00000000..8e2db20d --- /dev/null +++ b/node_modules/nan/doc/v8_misc.md @@ -0,0 +1,85 @@ +## Miscellaneous V8 Helpers + + - Nan::Utf8String + - Nan::GetCurrentContext() + - Nan::SetIsolateData() + - Nan::GetIsolateData() + - Nan::TypedArrayContents + + + +### Nan::Utf8String + +Converts an object to a UTF-8-encoded character array. If conversion to a string fails (e.g. due to an exception in the toString() method of the object) then the length() method returns 0 and the * operator returns NULL. The underlying memory used for this object is managed by the object. + +An implementation of [`v8::String::Utf8Value`](https://v8docs.nodesource.com/node-8.11/d4/d1b/classv8_1_1_string_1_1_utf8_value.html) that is consistent across all supported versions of V8. + +Definition: + +```c++ +class Nan::Utf8String { + public: + Nan::Utf8String(v8::Local<v8::Value> from); + + int length() const; + + char* operator*(); + const char* operator*() const; +}; +``` + + +### Nan::GetCurrentContext() + +A call to [`v8::Isolate::GetCurrent()->GetCurrentContext()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#a81c7a1ed7001ae2a65e89107f75fd053) that works across all supported versions of V8. + +Signature: + +```c++ +v8::Local<v8::Context> Nan::GetCurrentContext() +``` + +
### Nan::SetIsolateData() + +A helper to provide a consistent API to [`v8::Isolate#SetData()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#a7acadfe7965997e9c386a05f098fbe36). + +Signature: + +```c++ +void Nan::SetIsolateData(v8::Isolate *isolate, T *data) +``` + + + +### Nan::GetIsolateData() + +A helper to provide a consistent API to [`v8::Isolate#GetData()`](https://v8docs.nodesource.com/node-8.11/d5/dda/classv8_1_1_isolate.html#aabd223436bc1100a787dadaa024c6257). + +Signature: + +```c++ +T *Nan::GetIsolateData(v8::Isolate *isolate) +``` + + +### Nan::TypedArrayContents + +A helper class for accessing the contents of an ArrayBufferView (aka a typedarray) from C++.
If the input array is not a valid typedarray, then the data pointer of TypedArrayContents will default to `NULL` and the length will be 0. If the data pointer is not compatible with the alignment requirements of type, an assertion will fail. + +Note that you must store a reference to the `array` object while you are accessing its contents. + +Definition: + +```c++ +template<typename T> +class Nan::TypedArrayContents { + public: + TypedArrayContents(v8::Local<v8::Value> array); + + size_t length() const; + + T* const operator*(); + const T* const operator*() const; +}; +``` diff --git a/node_modules/nan/include_dirs.js b/node_modules/nan/include_dirs.js new file mode 100644 index 00000000..4f1dfb41 --- /dev/null +++ b/node_modules/nan/include_dirs.js @@ -0,0 +1 @@ +console.log(require('path').relative('.', __dirname)); diff --git a/node_modules/nan/nan.h b/node_modules/nan/nan.h new file mode 100644 index 00000000..514115d9 --- /dev/null +++ b/node_modules/nan/nan.h @@ -0,0 +1,2892 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors: + * - Rod Vagg + * - Benjamin Byholm + * - Trevor Norris + * - Nathan Rajlich + * - Brett Lawson + * - Ben Noordhuis + * - David Siegel + * - Michael Ira Krufky + * + * MIT License + * + * Version 2.14.0: current Node 12.2.0, Node 12: 0.12.18, Node 10: 0.10.48, iojs: 3.3.1 + * + * See https://github.com/nodejs/nan for the latest update to this file + **********************************************************************************/ + +#ifndef NAN_H_ +#define NAN_H_ + +#include + +#define NODE_0_10_MODULE_VERSION 11 +#define NODE_0_12_MODULE_VERSION 14 +#define ATOM_0_21_MODULE_VERSION 41 +#define IOJS_1_0_MODULE_VERSION 42 +#define IOJS_1_1_MODULE_VERSION 43 +#define IOJS_2_0_MODULE_VERSION 44 +#define IOJS_3_0_MODULE_VERSION 45 +#define NODE_4_0_MODULE_VERSION 46 +#define NODE_5_0_MODULE_VERSION 47 +#define NODE_6_0_MODULE_VERSION 48 +#define NODE_7_0_MODULE_VERSION 51 +#define NODE_8_0_MODULE_VERSION 57 +#define NODE_9_0_MODULE_VERSION 59 +#define NODE_10_0_MODULE_VERSION 64 +#define NODE_11_0_MODULE_VERSION 67 +#define NODE_12_0_MODULE_VERSION 72 + +#ifdef _MSC_VER +# define NAN_HAS_CPLUSPLUS_11 (_MSC_VER >= 1800) +#else +# define NAN_HAS_CPLUSPLUS_11 (__cplusplus >= 201103L) +#endif + +#if NODE_MODULE_VERSION >= IOJS_3_0_MODULE_VERSION && !NAN_HAS_CPLUSPLUS_11 +# error This version of node/NAN/v8 requires a C++11 compiler +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#if defined(_MSC_VER) +# pragma warning( push ) +# pragma warning( disable : 4530 ) +# include +# include +# include +# pragma warning( pop ) +#else +# include +# include +# include +#endif + +// uv helpers +#ifdef UV_VERSION_MAJOR +# ifndef UV_VERSION_PATCH +# define UV_VERSION_PATCH 0 +# endif +# define NAUV_UVVERSION ((UV_VERSION_MAJOR << 16) | \ + (UV_VERSION_MINOR << 8) | \ + (UV_VERSION_PATCH)) +#else +# define NAUV_UVVERSION 0x000b00 +#endif + +#if NAUV_UVVERSION < 0x000b0b +# ifdef WIN32 +# include +# else +# include +# endif +#endif + +namespace Nan { + +#define NAN_CONCAT(a, b) NAN_CONCAT_HELPER(a, b) +#define NAN_CONCAT_HELPER(a, b) a##b + +#define NAN_INLINE inline // TODO(bnoordhuis) Remove in v3.0.0.
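+// The macros below provide portable deprecation attributes (NAN_DEPRECATED) +// and copy/assign/move suppression helpers (NAN_DISALLOW_*) that work under +// both C++03 and C++11; they are used by classes throughout this header.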
+ +#if defined(__GNUC__) && \ + !(defined(V8_DISABLE_DEPRECATIONS) && V8_DISABLE_DEPRECATIONS) +# define NAN_DEPRECATED __attribute__((deprecated)) +#elif defined(_MSC_VER) && \ + !(defined(V8_DISABLE_DEPRECATIONS) && V8_DISABLE_DEPRECATIONS) +# define NAN_DEPRECATED __declspec(deprecated) +#else +# define NAN_DEPRECATED +#endif + +#if NAN_HAS_CPLUSPLUS_11 +# define NAN_DISALLOW_ASSIGN(CLASS) void operator=(const CLASS&) = delete; +# define NAN_DISALLOW_COPY(CLASS) CLASS(const CLASS&) = delete; +# define NAN_DISALLOW_MOVE(CLASS) \ + CLASS(CLASS&&) = delete; /* NOLINT(build/c++11) */ \ + void operator=(CLASS&&) = delete; +#else +# define NAN_DISALLOW_ASSIGN(CLASS) void operator=(const CLASS&); +# define NAN_DISALLOW_COPY(CLASS) CLASS(const CLASS&); +# define NAN_DISALLOW_MOVE(CLASS) +#endif + +#define NAN_DISALLOW_ASSIGN_COPY(CLASS) \ + NAN_DISALLOW_ASSIGN(CLASS) \ + NAN_DISALLOW_COPY(CLASS) + +#define NAN_DISALLOW_ASSIGN_MOVE(CLASS) \ + NAN_DISALLOW_ASSIGN(CLASS) \ + NAN_DISALLOW_MOVE(CLASS) + +#define NAN_DISALLOW_COPY_MOVE(CLASS) \ + NAN_DISALLOW_COPY(CLASS) \ + NAN_DISALLOW_MOVE(CLASS) + +#define NAN_DISALLOW_ASSIGN_COPY_MOVE(CLASS) \ + NAN_DISALLOW_ASSIGN(CLASS) \ + NAN_DISALLOW_COPY(CLASS) \ + NAN_DISALLOW_MOVE(CLASS) + +#define TYPE_CHECK(T, S) \ + while (false) { \ + *(static_cast(0)) = static_cast(0); \ + } + +//=== RegistrationFunction ===================================================== + +#if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + typedef v8::Handle ADDON_REGISTER_FUNCTION_ARGS_TYPE; +#else + typedef v8::Local ADDON_REGISTER_FUNCTION_ARGS_TYPE; +#endif + +#define NAN_MODULE_INIT(name) \ + void name(Nan::ADDON_REGISTER_FUNCTION_ARGS_TYPE target) + +#if NODE_MAJOR_VERSION >= 10 || \ + NODE_MAJOR_VERSION == 9 && NODE_MINOR_VERSION >= 3 +#define NAN_MODULE_WORKER_ENABLED(module_name, registration) \ + extern "C" NODE_MODULE_EXPORT void \ + NAN_CONCAT(node_register_module_v, NODE_MODULE_VERSION)( \ + v8::Local exports, v8::Local module, \ + v8::Local context) \ + { \ + registration(exports); \ + } +#else +#define NAN_MODULE_WORKER_ENABLED(module_name, registration) \ + NODE_MODULE(module_name, registration) +#endif + +//=== CallbackInfo ============================================================= + +#include "nan_callbacks.h" // NOLINT(build/include) + +//============================================================================== + +#if (NODE_MODULE_VERSION < NODE_0_12_MODULE_VERSION) +typedef v8::Script UnboundScript; +typedef v8::Script BoundScript; +#else +typedef v8::UnboundScript UnboundScript; +typedef v8::Script BoundScript; +#endif + +#if (NODE_MODULE_VERSION < ATOM_0_21_MODULE_VERSION) +typedef v8::String::ExternalAsciiStringResource + ExternalOneByteStringResource; +#else +typedef v8::String::ExternalOneByteStringResource + ExternalOneByteStringResource; +#endif + +#if (NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION) +template +class NonCopyablePersistentTraits : + public v8::NonCopyablePersistentTraits {}; +template +class CopyablePersistentTraits : + public v8::CopyablePersistentTraits {}; + +template +class PersistentBase : + public v8::PersistentBase {}; + +template > +class Persistent; +#else +template class NonCopyablePersistentTraits; +template class PersistentBase; +template class WeakCallbackData; +template > +class Persistent; +#endif // NODE_MODULE_VERSION + +template +class Maybe { + public: + inline bool IsNothing() const { return !has_value_; } + inline bool IsJust() const { return has_value_; } + + inline T ToChecked() const { return FromJust(); 
} + inline void Check() const { FromJust(); } + + inline bool To(T* out) const { + if (IsJust()) *out = value_; + return IsJust(); + } + + inline T FromJust() const { +#if defined(V8_ENABLE_CHECKS) + assert(IsJust() && "FromJust is Nothing"); +#endif // V8_ENABLE_CHECKS + return value_; + } + + inline T FromMaybe(const T& default_value) const { + return has_value_ ? value_ : default_value; + } + + inline bool operator==(const Maybe &other) const { + return (IsJust() == other.IsJust()) && + (!IsJust() || FromJust() == other.FromJust()); + } + + inline bool operator!=(const Maybe &other) const { + return !operator==(other); + } + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) + // Allow implicit conversions from v8::Maybe to Nan::Maybe. + Maybe(const v8::Maybe& that) // NOLINT(runtime/explicit) + : has_value_(that.IsJust()) + , value_(that.FromMaybe(T())) {} +#endif + + private: + Maybe() : has_value_(false) {} + explicit Maybe(const T& t) : has_value_(true), value_(t) {} + bool has_value_; + T value_; + + template + friend Maybe Nothing(); + template + friend Maybe Just(const U& u); +}; + +template +inline Maybe Nothing() { + return Maybe(); +} + +template +inline Maybe Just(const T& t) { + return Maybe(t); +} + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) +# include "nan_maybe_43_inl.h" // NOLINT(build/include) +#else +# include "nan_maybe_pre_43_inl.h" // NOLINT(build/include) +#endif + +#include "nan_converters.h" // NOLINT(build/include) +#include "nan_new.h" // NOLINT(build/include) + +#if NAUV_UVVERSION < 0x000b17 +#define NAUV_WORK_CB(func) \ + void func(uv_async_t *async, int) +#else +#define NAUV_WORK_CB(func) \ + void func(uv_async_t *async) +#endif + +#if NAUV_UVVERSION >= 0x000b0b + +typedef uv_key_t nauv_key_t; + +inline int nauv_key_create(nauv_key_t *key) { + return uv_key_create(key); +} + +inline void nauv_key_delete(nauv_key_t *key) { + uv_key_delete(key); +} + +inline void* nauv_key_get(nauv_key_t *key) { + return uv_key_get(key); +} + +inline void nauv_key_set(nauv_key_t *key, void *value) { + uv_key_set(key, value); +} + +#else + +/* Implement thread local storage for older versions of libuv. + * This is essentially a backport of libuv commit 5d2434bf + * written by Ben Noordhuis, adjusted for names and inline. 
+ */ + +#ifndef WIN32 + +typedef pthread_key_t nauv_key_t; + +inline int nauv_key_create(nauv_key_t* key) { + return -pthread_key_create(key, NULL); +} + +inline void nauv_key_delete(nauv_key_t* key) { + if (pthread_key_delete(*key)) + abort(); +} + +inline void* nauv_key_get(nauv_key_t* key) { + return pthread_getspecific(*key); +} + +inline void nauv_key_set(nauv_key_t* key, void* value) { + if (pthread_setspecific(*key, value)) + abort(); +} + +#else + +typedef struct { + DWORD tls_index; +} nauv_key_t; + +inline int nauv_key_create(nauv_key_t* key) { + key->tls_index = TlsAlloc(); + if (key->tls_index == TLS_OUT_OF_INDEXES) + return UV_ENOMEM; + return 0; +} + +inline void nauv_key_delete(nauv_key_t* key) { + if (TlsFree(key->tls_index) == FALSE) + abort(); + key->tls_index = TLS_OUT_OF_INDEXES; +} + +inline void* nauv_key_get(nauv_key_t* key) { + void* value = TlsGetValue(key->tls_index); + if (value == NULL) + if (GetLastError() != ERROR_SUCCESS) + abort(); + return value; +} + +inline void nauv_key_set(nauv_key_t* key, void* value) { + if (TlsSetValue(key->tls_index, value) == FALSE) + abort(); +} + +#endif +#endif + +#if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION +template +v8::Local New(v8::Handle); +#endif + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) + typedef v8::WeakCallbackType WeakCallbackType; +#else +struct WeakCallbackType { + enum E {kParameter, kInternalFields}; + E type; + WeakCallbackType(E other) : type(other) {} // NOLINT(runtime/explicit) + inline bool operator==(E other) { return other == this->type; } + inline bool operator!=(E other) { return !operator==(other); } +}; +#endif + +template class WeakCallbackInfo; + +#if NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION +# include "nan_persistent_12_inl.h" // NOLINT(build/include) +#else +# include "nan_persistent_pre_12_inl.h" // NOLINT(build/include) +#endif + +namespace imp { + static const size_t kMaxLength = 0x3fffffff; + // v8::String::REPLACE_INVALID_UTF8 was introduced + // in node.js v0.10.29 and v0.8.27. +#if NODE_MAJOR_VERSION > 0 || \ + NODE_MINOR_VERSION > 10 || \ + NODE_MINOR_VERSION == 10 && NODE_PATCH_VERSION >= 29 || \ + NODE_MINOR_VERSION == 8 && NODE_PATCH_VERSION >= 27 + static const unsigned kReplaceInvalidUtf8 = v8::String::REPLACE_INVALID_UTF8; +#else + static const unsigned kReplaceInvalidUtf8 = 0; +#endif +} // end of namespace imp + +//=== HandleScope ============================================================== + +class HandleScope { + v8::HandleScope scope; + + public: +#if NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + inline HandleScope() : scope(v8::Isolate::GetCurrent()) {} + inline static int NumberOfHandles() { + return v8::HandleScope::NumberOfHandles(v8::Isolate::GetCurrent()); + } +#else + inline HandleScope() : scope() {} + inline static int NumberOfHandles() { + return v8::HandleScope::NumberOfHandles(); + } +#endif + + private: + // Make it hard to create heap-allocated or illegal handle scopes by + // disallowing certain operations. 
+ HandleScope(const HandleScope &); + void operator=(const HandleScope &); + void *operator new(size_t size); + void operator delete(void *, size_t) { + abort(); + } +}; + +class EscapableHandleScope { + public: +#if NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + inline EscapableHandleScope() : scope(v8::Isolate::GetCurrent()) {} + + inline static int NumberOfHandles() { + return v8::EscapableHandleScope::NumberOfHandles(v8::Isolate::GetCurrent()); + } + + template + inline v8::Local Escape(v8::Local value) { + return scope.Escape(value); + } + + private: + v8::EscapableHandleScope scope; +#else + inline EscapableHandleScope() : scope() {} + + inline static int NumberOfHandles() { + return v8::HandleScope::NumberOfHandles(); + } + + template + inline v8::Local Escape(v8::Local value) { + return scope.Close(value); + } + + private: + v8::HandleScope scope; +#endif + + private: + // Make it hard to create heap-allocated or illegal handle scopes by + // disallowing certain operations. + EscapableHandleScope(const EscapableHandleScope &); + void operator=(const EscapableHandleScope &); + void *operator new(size_t size); + void operator delete(void *, size_t) { + abort(); + } +}; + +//=== TryCatch ================================================================= + +class TryCatch { + v8::TryCatch try_catch_; + friend void FatalException(const TryCatch&); + + public: +#if NODE_MODULE_VERSION > NODE_0_12_MODULE_VERSION + TryCatch() : try_catch_(v8::Isolate::GetCurrent()) {} +#endif + + inline bool HasCaught() const { return try_catch_.HasCaught(); } + + inline bool CanContinue() const { return try_catch_.CanContinue(); } + + inline v8::Local ReThrow() { +#if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + return New(try_catch_.ReThrow()); +#else + return try_catch_.ReThrow(); +#endif + } + + inline v8::Local Exception() const { + return try_catch_.Exception(); + } + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) + inline v8::MaybeLocal StackTrace() const { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(try_catch_.StackTrace(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); + } +#else + inline MaybeLocal StackTrace() const { + return try_catch_.StackTrace(); + } +#endif + + inline v8::Local Message() const { + return try_catch_.Message(); + } + + inline void Reset() { try_catch_.Reset(); } + + inline void SetVerbose(bool value) { try_catch_.SetVerbose(value); } + + inline void SetCaptureMessage(bool value) { + try_catch_.SetCaptureMessage(value); + } +}; + +v8::Local MakeCallback(v8::Local target, + v8::Local func, + int argc, + v8::Local* argv); +v8::Local MakeCallback(v8::Local target, + v8::Local symbol, + int argc, + v8::Local* argv); +v8::Local MakeCallback(v8::Local target, + const char* method, + int argc, + v8::Local* argv); + +// === AsyncResource =========================================================== + +class AsyncResource { + public: + AsyncResource( + v8::Local name + , v8::Local resource = New()) { +#if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + v8::Isolate* isolate = v8::Isolate::GetCurrent(); + + if (resource.IsEmpty()) { + resource = New(); + } + + context = node::EmitAsyncInit(isolate, resource, name); +#endif + } + + AsyncResource( + const char* name + , v8::Local resource = New()) { +#if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + v8::Isolate* isolate = v8::Isolate::GetCurrent(); + + if 
(resource.IsEmpty()) { + resource = New(); + } + + v8::Local name_string = + New(name).ToLocalChecked(); + context = node::EmitAsyncInit(isolate, resource, name_string); +#endif + } + + ~AsyncResource() { +#if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + v8::Isolate* isolate = v8::Isolate::GetCurrent(); + node::EmitAsyncDestroy(isolate, context); +#endif + } + + inline MaybeLocal runInAsyncScope( + v8::Local target + , v8::Local func + , int argc + , v8::Local* argv) { +#if NODE_MODULE_VERSION < NODE_9_0_MODULE_VERSION + return MakeCallback(target, func, argc, argv); +#else + return node::MakeCallback( + v8::Isolate::GetCurrent(), target, func, argc, argv, context); +#endif + } + + inline MaybeLocal runInAsyncScope( + v8::Local target + , v8::Local symbol + , int argc + , v8::Local* argv) { +#if NODE_MODULE_VERSION < NODE_9_0_MODULE_VERSION + return MakeCallback(target, symbol, argc, argv); +#else + return node::MakeCallback( + v8::Isolate::GetCurrent(), target, symbol, argc, argv, context); +#endif + } + + inline MaybeLocal runInAsyncScope( + v8::Local target + , const char* method + , int argc + , v8::Local* argv) { +#if NODE_MODULE_VERSION < NODE_9_0_MODULE_VERSION + return MakeCallback(target, method, argc, argv); +#else + return node::MakeCallback( + v8::Isolate::GetCurrent(), target, method, argc, argv, context); +#endif + } + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(AsyncResource) +#if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + node::async_context context; +#endif +}; + +inline uv_loop_t* GetCurrentEventLoop() { +#if NODE_MAJOR_VERSION >= 10 || \ + NODE_MAJOR_VERSION == 9 && NODE_MINOR_VERSION >= 3 || \ + NODE_MAJOR_VERSION == 8 && NODE_MINOR_VERSION >= 10 + return node::GetCurrentEventLoop(v8::Isolate::GetCurrent()); +#else + return uv_default_loop(); +#endif +} + +//============ ================================================================= + +/* node 0.12 */ +#if NODE_MODULE_VERSION >= NODE_0_12_MODULE_VERSION + inline + void SetCounterFunction(v8::CounterLookupCallback cb) { + v8::Isolate::GetCurrent()->SetCounterFunction(cb); + } + + inline + void SetCreateHistogramFunction(v8::CreateHistogramCallback cb) { + v8::Isolate::GetCurrent()->SetCreateHistogramFunction(cb); + } + + inline + void SetAddHistogramSampleFunction(v8::AddHistogramSampleCallback cb) { + v8::Isolate::GetCurrent()->SetAddHistogramSampleFunction(cb); + } + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) + inline bool IdleNotification(int idle_time_in_ms) { + return v8::Isolate::GetCurrent()->IdleNotificationDeadline( + idle_time_in_ms * 0.001); + } +# else + inline bool IdleNotification(int idle_time_in_ms) { + return v8::Isolate::GetCurrent()->IdleNotification(idle_time_in_ms); + } +#endif + + inline void LowMemoryNotification() { + v8::Isolate::GetCurrent()->LowMemoryNotification(); + } + + inline void ContextDisposedNotification() { + v8::Isolate::GetCurrent()->ContextDisposedNotification(); + } +#else + inline + void SetCounterFunction(v8::CounterLookupCallback cb) { + v8::V8::SetCounterFunction(cb); + } + + inline + void SetCreateHistogramFunction(v8::CreateHistogramCallback cb) { + v8::V8::SetCreateHistogramFunction(cb); + } + + inline + void SetAddHistogramSampleFunction(v8::AddHistogramSampleCallback cb) { + v8::V8::SetAddHistogramSampleFunction(cb); + } + + inline bool IdleNotification(int idle_time_in_ms) { + return v8::V8::IdleNotification(idle_time_in_ms); + } + + inline void 
LowMemoryNotification() { + v8::V8::LowMemoryNotification(); + } + + inline void ContextDisposedNotification() { + v8::V8::ContextDisposedNotification(); + } +#endif + +#if (NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION) // Node 0.12 + inline v8::Local Undefined() { +# if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + EscapableHandleScope scope; + return scope.Escape(New(v8::Undefined(v8::Isolate::GetCurrent()))); +# else + return v8::Undefined(v8::Isolate::GetCurrent()); +# endif + } + + inline v8::Local Null() { +# if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + EscapableHandleScope scope; + return scope.Escape(New(v8::Null(v8::Isolate::GetCurrent()))); +# else + return v8::Null(v8::Isolate::GetCurrent()); +# endif + } + + inline v8::Local True() { +# if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + EscapableHandleScope scope; + return scope.Escape(New(v8::True(v8::Isolate::GetCurrent()))); +# else + return v8::True(v8::Isolate::GetCurrent()); +# endif + } + + inline v8::Local False() { +# if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + EscapableHandleScope scope; + return scope.Escape(New(v8::False(v8::Isolate::GetCurrent()))); +# else + return v8::False(v8::Isolate::GetCurrent()); +# endif + } + + inline v8::Local EmptyString() { + return v8::String::Empty(v8::Isolate::GetCurrent()); + } + + inline int AdjustExternalMemory(int bc) { + return static_cast( + v8::Isolate::GetCurrent()->AdjustAmountOfExternalAllocatedMemory(bc)); + } + + inline void SetTemplate( + v8::Local templ + , const char *name + , v8::Local value) { + templ->Set(v8::Isolate::GetCurrent(), name, value); + } + + inline void SetTemplate( + v8::Local templ + , v8::Local name + , v8::Local value + , v8::PropertyAttribute attributes) { + templ->Set(name, value, attributes); + } + + inline v8::Local GetCurrentContext() { + return v8::Isolate::GetCurrent()->GetCurrentContext(); + } + + inline void* GetInternalFieldPointer( + v8::Local object + , int index) { + return object->GetAlignedPointerFromInternalField(index); + } + + inline void SetInternalFieldPointer( + v8::Local object + , int index + , void* value) { + object->SetAlignedPointerInInternalField(index, value); + } + +# define NAN_GC_CALLBACK(name) \ + void name(v8::Isolate *isolate, v8::GCType type, v8::GCCallbackFlags flags) + +#if NODE_MODULE_VERSION <= NODE_4_0_MODULE_VERSION + typedef v8::Isolate::GCEpilogueCallback GCEpilogueCallback; + typedef v8::Isolate::GCPrologueCallback GCPrologueCallback; +#else + typedef v8::Isolate::GCCallback GCEpilogueCallback; + typedef v8::Isolate::GCCallback GCPrologueCallback; +#endif + + inline void AddGCEpilogueCallback( + GCEpilogueCallback callback + , v8::GCType gc_type_filter = v8::kGCTypeAll) { + v8::Isolate::GetCurrent()->AddGCEpilogueCallback(callback, gc_type_filter); + } + + inline void RemoveGCEpilogueCallback( + GCEpilogueCallback callback) { + v8::Isolate::GetCurrent()->RemoveGCEpilogueCallback(callback); + } + + inline void AddGCPrologueCallback( + GCPrologueCallback callback + , v8::GCType gc_type_filter = v8::kGCTypeAll) { + v8::Isolate::GetCurrent()->AddGCPrologueCallback(callback, gc_type_filter); + } + + inline void RemoveGCPrologueCallback( + GCPrologueCallback callback) { + v8::Isolate::GetCurrent()->RemoveGCPrologueCallback(callback); + } + + inline void GetHeapStatistics( + v8::HeapStatistics *heap_statistics) { + v8::Isolate::GetCurrent()->GetHeapStatistics(heap_statistics); + } + +# define X(NAME) \ + inline v8::Local NAME(const char *msg) { \ + EscapableHandleScope scope; \ + return 
scope.Escape(v8::Exception::NAME(New(msg).ToLocalChecked())); \ + } \ + \ + inline \ + v8::Local NAME(v8::Local msg) { \ + return v8::Exception::NAME(msg); \ + } \ + \ + inline void Throw ## NAME(const char *msg) { \ + HandleScope scope; \ + v8::Isolate::GetCurrent()->ThrowException( \ + v8::Exception::NAME(New(msg).ToLocalChecked())); \ + } \ + \ + inline void Throw ## NAME(v8::Local msg) { \ + HandleScope scope; \ + v8::Isolate::GetCurrent()->ThrowException( \ + v8::Exception::NAME(msg)); \ + } + + X(Error) + X(RangeError) + X(ReferenceError) + X(SyntaxError) + X(TypeError) + +# undef X + + inline void ThrowError(v8::Local error) { + v8::Isolate::GetCurrent()->ThrowException(error); + } + + inline MaybeLocal NewBuffer( + char *data + , size_t length +#if NODE_MODULE_VERSION > IOJS_2_0_MODULE_VERSION + , node::Buffer::FreeCallback callback +#else + , node::smalloc::FreeCallback callback +#endif + , void *hint + ) { + // arbitrary buffer lengths requires + // NODE_MODULE_VERSION >= IOJS_3_0_MODULE_VERSION + assert(length <= imp::kMaxLength && "too large buffer"); +#if NODE_MODULE_VERSION > IOJS_2_0_MODULE_VERSION + return node::Buffer::New( + v8::Isolate::GetCurrent(), data, length, callback, hint); +#else + return node::Buffer::New(v8::Isolate::GetCurrent(), data, length, callback, + hint); +#endif + } + + inline MaybeLocal CopyBuffer( + const char *data + , uint32_t size + ) { + // arbitrary buffer lengths requires + // NODE_MODULE_VERSION >= IOJS_3_0_MODULE_VERSION + assert(size <= imp::kMaxLength && "too large buffer"); +#if NODE_MODULE_VERSION > IOJS_2_0_MODULE_VERSION + return node::Buffer::Copy( + v8::Isolate::GetCurrent(), data, size); +#else + return node::Buffer::New(v8::Isolate::GetCurrent(), data, size); +#endif + } + + inline MaybeLocal NewBuffer(uint32_t size) { + // arbitrary buffer lengths requires + // NODE_MODULE_VERSION >= IOJS_3_0_MODULE_VERSION + assert(size <= imp::kMaxLength && "too large buffer"); +#if NODE_MODULE_VERSION > IOJS_2_0_MODULE_VERSION + return node::Buffer::New( + v8::Isolate::GetCurrent(), size); +#else + return node::Buffer::New(v8::Isolate::GetCurrent(), size); +#endif + } + + inline MaybeLocal NewBuffer( + char* data + , uint32_t size + ) { + // arbitrary buffer lengths requires + // NODE_MODULE_VERSION >= IOJS_3_0_MODULE_VERSION + assert(size <= imp::kMaxLength && "too large buffer"); +#if NODE_MODULE_VERSION > IOJS_2_0_MODULE_VERSION + return node::Buffer::New(v8::Isolate::GetCurrent(), data, size); +#else + return node::Buffer::Use(v8::Isolate::GetCurrent(), data, size); +#endif + } + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) + inline MaybeLocal + NewOneByteString(const uint8_t * value, int length = -1) { + return v8::String::NewFromOneByte(v8::Isolate::GetCurrent(), value, + v8::NewStringType::kNormal, length); + } + + inline MaybeLocal CompileScript( + v8::Local s + , const v8::ScriptOrigin& origin + ) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + v8::ScriptCompiler::Source source(s, origin); + return scope.Escape( + v8::ScriptCompiler::Compile(isolate->GetCurrentContext(), &source) + .FromMaybe(v8::Local())); + } + + inline MaybeLocal CompileScript( + v8::Local s + ) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + v8::ScriptCompiler::Source source(s); + return scope.Escape( + v8::ScriptCompiler::Compile(isolate->GetCurrentContext(), &source) + 
.FromMaybe(v8::Local())); + } + + inline MaybeLocal RunScript( + v8::Local script + ) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(script->BindToCurrentContext() + ->Run(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); + } + + inline MaybeLocal RunScript( + v8::Local script + ) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(script->Run(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); + } +#else + inline MaybeLocal + NewOneByteString(const uint8_t * value, int length = -1) { + return v8::String::NewFromOneByte(v8::Isolate::GetCurrent(), value, + v8::String::kNormalString, length); + } + + inline MaybeLocal CompileScript( + v8::Local s + , const v8::ScriptOrigin& origin + ) { + v8::ScriptCompiler::Source source(s, origin); + return v8::ScriptCompiler::Compile(v8::Isolate::GetCurrent(), &source); + } + + inline MaybeLocal CompileScript( + v8::Local s + ) { + v8::ScriptCompiler::Source source(s); + return v8::ScriptCompiler::Compile(v8::Isolate::GetCurrent(), &source); + } + + inline MaybeLocal RunScript( + v8::Local script + ) { + EscapableHandleScope scope; + return scope.Escape(script->BindToCurrentContext()->Run()); + } + + inline MaybeLocal RunScript( + v8::Local script + ) { + return script->Run(); + } +#endif + + NAN_DEPRECATED inline v8::Local MakeCallback( + v8::Local target + , v8::Local func + , int argc + , v8::Local* argv) { +#if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + EscapableHandleScope scope; + return scope.Escape(New(node::MakeCallback( + v8::Isolate::GetCurrent(), target, func, argc, argv))); +#else +# if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + AsyncResource res("nan:makeCallback"); + return res.runInAsyncScope(target, func, argc, argv) + .FromMaybe(v8::Local()); +# else + return node::MakeCallback( + v8::Isolate::GetCurrent(), target, func, argc, argv); +# endif // NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION +#endif // NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + } + + NAN_DEPRECATED inline v8::Local MakeCallback( + v8::Local target + , v8::Local symbol + , int argc + , v8::Local* argv) { +#if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + EscapableHandleScope scope; + return scope.Escape(New(node::MakeCallback( + v8::Isolate::GetCurrent(), target, symbol, argc, argv))); +#else +# if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + AsyncResource res("nan:makeCallback"); + return res.runInAsyncScope(target, symbol, argc, argv) + .FromMaybe(v8::Local()); +# else + return node::MakeCallback( + v8::Isolate::GetCurrent(), target, symbol, argc, argv); +# endif // NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION +#endif // NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + } + + NAN_DEPRECATED inline v8::Local MakeCallback( + v8::Local target + , const char* method + , int argc + , v8::Local* argv) { +#if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + EscapableHandleScope scope; + return scope.Escape(New(node::MakeCallback( + v8::Isolate::GetCurrent(), target, method, argc, argv))); +#else +# if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + AsyncResource res("nan:makeCallback"); + return res.runInAsyncScope(target, method, argc, argv) + .FromMaybe(v8::Local()); +# else + return node::MakeCallback( + v8::Isolate::GetCurrent(), target, method, argc, argv); +# endif // NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION +#endif // NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + } + + inline void 
FatalException(const TryCatch& try_catch) { + node::FatalException(v8::Isolate::GetCurrent(), try_catch.try_catch_); + } + + inline v8::Local ErrnoException( + int errorno + , const char* syscall = NULL + , const char* message = NULL + , const char* path = NULL) { + return node::ErrnoException(v8::Isolate::GetCurrent(), errorno, syscall, + message, path); + } + + NAN_DEPRECATED inline v8::Local NanErrnoException( + int errorno + , const char* syscall = NULL + , const char* message = NULL + , const char* path = NULL) { + return ErrnoException(errorno, syscall, message, path); + } + + template + inline void SetIsolateData( + v8::Isolate *isolate + , T *data + ) { + isolate->SetData(0, data); + } + + template + inline T *GetIsolateData( + v8::Isolate *isolate + ) { + return static_cast(isolate->GetData(0)); + } + +class Utf8String { + public: + inline explicit Utf8String(v8::Local from) : + length_(0), str_(str_st_) { + HandleScope scope; + if (!from.IsEmpty()) { +#if NODE_MAJOR_VERSION >= 10 + v8::Local context = GetCurrentContext(); + v8::Local string = + from->ToString(context).FromMaybe(v8::Local()); +#else + v8::Local string = from->ToString(); +#endif + if (!string.IsEmpty()) { + size_t len = 3 * string->Length() + 1; + assert(len <= INT_MAX); + if (len > sizeof (str_st_)) { + str_ = static_cast(malloc(len)); + assert(str_ != 0); + } + const int flags = + v8::String::NO_NULL_TERMINATION | imp::kReplaceInvalidUtf8; +#if NODE_MAJOR_VERSION >= 11 + length_ = string->WriteUtf8(v8::Isolate::GetCurrent(), str_, + static_cast(len), 0, flags); +#else + // See https://github.com/nodejs/nan/issues/832. + // Disable the warning as there is no way around it. +#ifdef _MSC_VER +#pragma warning(push) +#pragma warning(disable : 4996) +#endif +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wdeprecated-declarations" +#endif + length_ = string->WriteUtf8(str_, static_cast(len), 0, flags); +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif +#ifdef _MSC_VER +#pragma warning(pop) +#endif +#endif // NODE_MAJOR_VERSION < 11 + str_[length_] = '\0'; + } + } + } + + inline int length() const { + return length_; + } + + inline char* operator*() { return str_; } + inline const char* operator*() const { return str_; } + + inline ~Utf8String() { + if (str_ != str_st_) { + free(str_); + } + } + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(Utf8String) + + int length_; + char *str_; + char str_st_[1024]; +}; + +#else // Node 0.8 and 0.10 + inline v8::Local Undefined() { + EscapableHandleScope scope; + return scope.Escape(New(v8::Undefined())); + } + + inline v8::Local Null() { + EscapableHandleScope scope; + return scope.Escape(New(v8::Null())); + } + + inline v8::Local True() { + EscapableHandleScope scope; + return scope.Escape(New(v8::True())); + } + + inline v8::Local False() { + EscapableHandleScope scope; + return scope.Escape(New(v8::False())); + } + + inline v8::Local EmptyString() { + return v8::String::Empty(); + } + + inline int AdjustExternalMemory(int bc) { + return static_cast(v8::V8::AdjustAmountOfExternalAllocatedMemory(bc)); + } + + inline void SetTemplate( + v8::Local templ + , const char *name + , v8::Local value) { + templ->Set(name, value); + } + + inline void SetTemplate( + v8::Local templ + , v8::Local name + , v8::Local value + , v8::PropertyAttribute attributes) { + templ->Set(name, value, attributes); + } + + inline v8::Local GetCurrentContext() { + return v8::Context::GetCurrent(); + } + + inline void* GetInternalFieldPointer( + v8::Local object + , int index) { 
+ return object->GetPointerFromInternalField(index); + } + + inline void SetInternalFieldPointer( + v8::Local object + , int index + , void* value) { + object->SetPointerInInternalField(index, value); + } + +# define NAN_GC_CALLBACK(name) \ + void name(v8::GCType type, v8::GCCallbackFlags flags) + + inline void AddGCEpilogueCallback( + v8::GCEpilogueCallback callback + , v8::GCType gc_type_filter = v8::kGCTypeAll) { + v8::V8::AddGCEpilogueCallback(callback, gc_type_filter); + } + inline void RemoveGCEpilogueCallback( + v8::GCEpilogueCallback callback) { + v8::V8::RemoveGCEpilogueCallback(callback); + } + inline void AddGCPrologueCallback( + v8::GCPrologueCallback callback + , v8::GCType gc_type_filter = v8::kGCTypeAll) { + v8::V8::AddGCPrologueCallback(callback, gc_type_filter); + } + inline void RemoveGCPrologueCallback( + v8::GCPrologueCallback callback) { + v8::V8::RemoveGCPrologueCallback(callback); + } + inline void GetHeapStatistics( + v8::HeapStatistics *heap_statistics) { + v8::V8::GetHeapStatistics(heap_statistics); + } + +# define X(NAME) \ + inline v8::Local NAME(const char *msg) { \ + EscapableHandleScope scope; \ + return scope.Escape(v8::Exception::NAME(New(msg).ToLocalChecked())); \ + } \ + \ + inline \ + v8::Local NAME(v8::Local msg) { \ + return v8::Exception::NAME(msg); \ + } \ + \ + inline void Throw ## NAME(const char *msg) { \ + HandleScope scope; \ + v8::ThrowException(v8::Exception::NAME(New(msg).ToLocalChecked())); \ + } \ + \ + inline \ + void Throw ## NAME(v8::Local errmsg) { \ + HandleScope scope; \ + v8::ThrowException(v8::Exception::NAME(errmsg)); \ + } + + X(Error) + X(RangeError) + X(ReferenceError) + X(SyntaxError) + X(TypeError) + +# undef X + + inline void ThrowError(v8::Local error) { + v8::ThrowException(error); + } + + inline MaybeLocal NewBuffer( + char *data + , size_t length + , node::Buffer::free_callback callback + , void *hint + ) { + EscapableHandleScope scope; + // arbitrary buffer lengths requires + // NODE_MODULE_VERSION >= IOJS_3_0_MODULE_VERSION + assert(length <= imp::kMaxLength && "too large buffer"); + return scope.Escape( + New(node::Buffer::New(data, length, callback, hint)->handle_)); + } + + inline MaybeLocal CopyBuffer( + const char *data + , uint32_t size + ) { + EscapableHandleScope scope; + // arbitrary buffer lengths requires + // NODE_MODULE_VERSION >= IOJS_3_0_MODULE_VERSION + assert(size <= imp::kMaxLength && "too large buffer"); +#if NODE_MODULE_VERSION >= NODE_0_10_MODULE_VERSION + return scope.Escape(New(node::Buffer::New(data, size)->handle_)); +#else + return scope.Escape( + New(node::Buffer::New(const_cast(data), size)->handle_)); +#endif + } + + inline MaybeLocal NewBuffer(uint32_t size) { + // arbitrary buffer lengths requires + // NODE_MODULE_VERSION >= IOJS_3_0_MODULE_VERSION + EscapableHandleScope scope; + assert(size <= imp::kMaxLength && "too large buffer"); + return scope.Escape(New(node::Buffer::New(size)->handle_)); + } + + inline void FreeData(char *data, void *hint) { + (void) hint; // unused + delete[] data; + } + + inline MaybeLocal NewBuffer( + char* data + , uint32_t size + ) { + EscapableHandleScope scope; + // arbitrary buffer lengths requires + // NODE_MODULE_VERSION >= IOJS_3_0_MODULE_VERSION + assert(size <= imp::kMaxLength && "too large buffer"); + return scope.Escape( + New(node::Buffer::New(data, size, FreeData, NULL)->handle_)); + } + +namespace imp { +inline void +widenString(std::vector *ws, const uint8_t *s, int l) { + size_t len = static_cast(l); + if (l < 0) { + len = 
strlen(reinterpret_cast(s)); + } + assert(len <= INT_MAX && "string too long"); + ws->resize(len); + std::copy(s, s + len, ws->begin()); // NOLINT(build/include_what_you_use) +} +} // end of namespace imp + + inline MaybeLocal + NewOneByteString(const uint8_t * value, int length = -1) { + std::vector wideString; // NOLINT(build/include_what_you_use) + imp::widenString(&wideString, value, length); + return v8::String::New(wideString.data(), + static_cast(wideString.size())); + } + + inline MaybeLocal CompileScript( + v8::Local s + , const v8::ScriptOrigin& origin + ) { + return v8::Script::Compile(s, const_cast(&origin)); + } + + inline MaybeLocal CompileScript( + v8::Local s + ) { + return v8::Script::Compile(s); + } + + inline + MaybeLocal RunScript(v8::Local script) { + return script->Run(); + } + + inline v8::Local MakeCallback( + v8::Local target + , v8::Local func + , int argc + , v8::Local* argv) { + v8::HandleScope scope; + return scope.Close(New(node::MakeCallback(target, func, argc, argv))); + } + + inline v8::Local MakeCallback( + v8::Local target + , v8::Local symbol + , int argc + , v8::Local* argv) { + v8::HandleScope scope; + return scope.Close(New(node::MakeCallback(target, symbol, argc, argv))); + } + + inline v8::Local MakeCallback( + v8::Local target + , const char* method + , int argc + , v8::Local* argv) { + v8::HandleScope scope; + return scope.Close(New(node::MakeCallback(target, method, argc, argv))); + } + + inline void FatalException(const TryCatch& try_catch) { + node::FatalException(const_cast(try_catch.try_catch_)); + } + + inline v8::Local ErrnoException( + int errorno + , const char* syscall = NULL + , const char* message = NULL + , const char* path = NULL) { + return node::ErrnoException(errorno, syscall, message, path); + } + + NAN_DEPRECATED inline v8::Local NanErrnoException( + int errorno + , const char* syscall = NULL + , const char* message = NULL + , const char* path = NULL) { + return ErrnoException(errorno, syscall, message, path); + } + + + template + inline void SetIsolateData( + v8::Isolate *isolate + , T *data + ) { + isolate->SetData(data); + } + + template + inline T *GetIsolateData( + v8::Isolate *isolate + ) { + return static_cast(isolate->GetData()); + } + +class Utf8String { + public: + inline explicit Utf8String(v8::Local from) : + length_(0), str_(str_st_) { + v8::HandleScope scope; + if (!from.IsEmpty()) { + v8::Local string = from->ToString(); + if (!string.IsEmpty()) { + size_t len = 3 * string->Length() + 1; + assert(len <= INT_MAX); + if (len > sizeof (str_st_)) { + str_ = static_cast(malloc(len)); + assert(str_ != 0); + } + const int flags = + v8::String::NO_NULL_TERMINATION | imp::kReplaceInvalidUtf8; + length_ = string->WriteUtf8(str_, static_cast(len), 0, flags); + str_[length_] = '\0'; + } + } + } + + inline int length() const { + return length_; + } + + inline char* operator*() { return str_; } + inline const char* operator*() const { return str_; } + + inline ~Utf8String() { + if (str_ != str_st_) { + free(str_); + } + } + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(Utf8String) + + int length_; + char *str_; + char str_st_[1024]; +}; + +#endif // NODE_MODULE_VERSION + +typedef void (*FreeCallback)(char *data, void *hint); + +typedef const FunctionCallbackInfo& NAN_METHOD_ARGS_TYPE; +typedef void NAN_METHOD_RETURN_TYPE; + +typedef const PropertyCallbackInfo& NAN_GETTER_ARGS_TYPE; +typedef void NAN_GETTER_RETURN_TYPE; + +typedef const PropertyCallbackInfo& NAN_SETTER_ARGS_TYPE; +typedef void NAN_SETTER_RETURN_TYPE; + +typedef 
const PropertyCallbackInfo& + NAN_PROPERTY_GETTER_ARGS_TYPE; +typedef void NAN_PROPERTY_GETTER_RETURN_TYPE; + +typedef const PropertyCallbackInfo& + NAN_PROPERTY_SETTER_ARGS_TYPE; +typedef void NAN_PROPERTY_SETTER_RETURN_TYPE; + +typedef const PropertyCallbackInfo& + NAN_PROPERTY_ENUMERATOR_ARGS_TYPE; +typedef void NAN_PROPERTY_ENUMERATOR_RETURN_TYPE; + +typedef const PropertyCallbackInfo& + NAN_PROPERTY_DELETER_ARGS_TYPE; +typedef void NAN_PROPERTY_DELETER_RETURN_TYPE; + +typedef const PropertyCallbackInfo& + NAN_PROPERTY_QUERY_ARGS_TYPE; +typedef void NAN_PROPERTY_QUERY_RETURN_TYPE; + +typedef const PropertyCallbackInfo& NAN_INDEX_GETTER_ARGS_TYPE; +typedef void NAN_INDEX_GETTER_RETURN_TYPE; + +typedef const PropertyCallbackInfo& NAN_INDEX_SETTER_ARGS_TYPE; +typedef void NAN_INDEX_SETTER_RETURN_TYPE; + +typedef const PropertyCallbackInfo& + NAN_INDEX_ENUMERATOR_ARGS_TYPE; +typedef void NAN_INDEX_ENUMERATOR_RETURN_TYPE; + +typedef const PropertyCallbackInfo& + NAN_INDEX_DELETER_ARGS_TYPE; +typedef void NAN_INDEX_DELETER_RETURN_TYPE; + +typedef const PropertyCallbackInfo& + NAN_INDEX_QUERY_ARGS_TYPE; +typedef void NAN_INDEX_QUERY_RETURN_TYPE; + +#define NAN_METHOD(name) \ + Nan::NAN_METHOD_RETURN_TYPE name(Nan::NAN_METHOD_ARGS_TYPE info) +#define NAN_GETTER(name) \ + Nan::NAN_GETTER_RETURN_TYPE name( \ + v8::Local property \ + , Nan::NAN_GETTER_ARGS_TYPE info) +#define NAN_SETTER(name) \ + Nan::NAN_SETTER_RETURN_TYPE name( \ + v8::Local property \ + , v8::Local value \ + , Nan::NAN_SETTER_ARGS_TYPE info) +#define NAN_PROPERTY_GETTER(name) \ + Nan::NAN_PROPERTY_GETTER_RETURN_TYPE name( \ + v8::Local property \ + , Nan::NAN_PROPERTY_GETTER_ARGS_TYPE info) +#define NAN_PROPERTY_SETTER(name) \ + Nan::NAN_PROPERTY_SETTER_RETURN_TYPE name( \ + v8::Local property \ + , v8::Local value \ + , Nan::NAN_PROPERTY_SETTER_ARGS_TYPE info) +#define NAN_PROPERTY_ENUMERATOR(name) \ + Nan::NAN_PROPERTY_ENUMERATOR_RETURN_TYPE name( \ + Nan::NAN_PROPERTY_ENUMERATOR_ARGS_TYPE info) +#define NAN_PROPERTY_DELETER(name) \ + Nan::NAN_PROPERTY_DELETER_RETURN_TYPE name( \ + v8::Local property \ + , Nan::NAN_PROPERTY_DELETER_ARGS_TYPE info) +#define NAN_PROPERTY_QUERY(name) \ + Nan::NAN_PROPERTY_QUERY_RETURN_TYPE name( \ + v8::Local property \ + , Nan::NAN_PROPERTY_QUERY_ARGS_TYPE info) +# define NAN_INDEX_GETTER(name) \ + Nan::NAN_INDEX_GETTER_RETURN_TYPE name( \ + uint32_t index \ + , Nan::NAN_INDEX_GETTER_ARGS_TYPE info) +#define NAN_INDEX_SETTER(name) \ + Nan::NAN_INDEX_SETTER_RETURN_TYPE name( \ + uint32_t index \ + , v8::Local value \ + , Nan::NAN_INDEX_SETTER_ARGS_TYPE info) +#define NAN_INDEX_ENUMERATOR(name) \ + Nan::NAN_INDEX_ENUMERATOR_RETURN_TYPE \ + name(Nan::NAN_INDEX_ENUMERATOR_ARGS_TYPE info) +#define NAN_INDEX_DELETER(name) \ + Nan::NAN_INDEX_DELETER_RETURN_TYPE name( \ + uint32_t index \ + , Nan::NAN_INDEX_DELETER_ARGS_TYPE info) +#define NAN_INDEX_QUERY(name) \ + Nan::NAN_INDEX_QUERY_RETURN_TYPE name( \ + uint32_t index \ + , Nan::NAN_INDEX_QUERY_ARGS_TYPE info) + +class Callback { + public: + Callback() {} + + explicit Callback(const v8::Local &fn) : handle_(fn) {} + + ~Callback() { + handle_.Reset(); + } + + bool operator==(const Callback &other) const { + return handle_ == other.handle_; + } + + bool operator!=(const Callback &other) const { + return !operator==(other); + } + + inline + v8::Local operator*() const { return GetFunction(); } + + NAN_DEPRECATED inline v8::Local operator()( + v8::Local target + , int argc = 0 + , v8::Local argv[] = 0) const { +#if NODE_MODULE_VERSION > 
NODE_0_10_MODULE_VERSION + v8::Isolate *isolate = v8::Isolate::GetCurrent(); +# if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + AsyncResource async("nan:Callback:operator()"); + return Call_(isolate, target, argc, argv, &async) + .FromMaybe(v8::Local()); +# else + return Call_(isolate, target, argc, argv); +# endif // NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION +#else + return Call_(target, argc, argv); +#endif // NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + } + + NAN_DEPRECATED inline v8::Local operator()( + int argc = 0 + , v8::Local argv[] = 0) const { +#if NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); +# if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + AsyncResource async("nan:Callback:operator()"); + return scope.Escape(Call_(isolate, isolate->GetCurrentContext()->Global(), + argc, argv, &async) + .FromMaybe(v8::Local())); +# else + return scope.Escape( + Call_(isolate, isolate->GetCurrentContext()->Global(), argc, argv)); +# endif // NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION +#else + v8::HandleScope scope; + return scope.Close(Call_(v8::Context::GetCurrent()->Global(), argc, argv)); +#endif // NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + } + + inline MaybeLocal operator()( + AsyncResource* resource + , int argc = 0 + , v8::Local argv[] = 0) const { + return this->Call(argc, argv, resource); + } + + inline MaybeLocal operator()( + AsyncResource* resource + , v8::Local target + , int argc = 0 + , v8::Local argv[] = 0) const { + return this->Call(target, argc, argv, resource); + } + + // TODO(kkoopa): remove + inline void SetFunction(const v8::Local &fn) { + Reset(fn); + } + + inline void Reset(const v8::Local &fn) { + handle_.Reset(fn); + } + + inline void Reset() { + handle_.Reset(); + } + + inline v8::Local GetFunction() const { + return New(handle_); + } + + inline bool IsEmpty() const { + return handle_.IsEmpty(); + } + + // Deprecated: For async callbacks Use the versions that accept an + // AsyncResource. If this callback does not correspond to an async resource, + // that is, it is a synchronous function call on a non-empty JS stack, you + // should Nan::Call instead. + NAN_DEPRECATED inline v8::Local + Call(v8::Local target + , int argc + , v8::Local argv[]) const { +#if NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + v8::Isolate *isolate = v8::Isolate::GetCurrent(); +# if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + AsyncResource async("nan:Callback:Call"); + return Call_(isolate, target, argc, argv, &async) + .FromMaybe(v8::Local()); +# else + return Call_(isolate, target, argc, argv); +# endif // NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION +#else + return Call_(target, argc, argv); +#endif + } + + // Deprecated: For async callbacks Use the versions that accept an + // AsyncResource. If this callback does not correspond to an async resource, + // that is, it is a synchronous function call on a non-empty JS stack, you + // should Nan::Call instead. 
+ NAN_DEPRECATED inline v8::Local + Call(int argc, v8::Local argv[]) const { +#if NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); +# if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + AsyncResource async("nan:Callback:Call"); + return scope.Escape(Call_(isolate, isolate->GetCurrentContext()->Global(), + argc, argv, &async) + .FromMaybe(v8::Local())); +# else + return scope.Escape( + Call_(isolate, isolate->GetCurrentContext()->Global(), argc, argv)); +# endif // NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION +#else + v8::HandleScope scope; + return scope.Close(Call_(v8::Context::GetCurrent()->Global(), argc, argv)); +#endif + } + + inline MaybeLocal + Call(v8::Local target + , int argc + , v8::Local argv[] + , AsyncResource* resource) const { +#if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + v8::Isolate* isolate = v8::Isolate::GetCurrent(); + return Call_(isolate, target, argc, argv, resource); +#elif NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + return Call_(isolate, target, argc, argv); +#else + return Call_(target, argc, argv); +#endif + } + + inline MaybeLocal + Call(int argc, v8::Local argv[], AsyncResource* resource) const { +#if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + v8::Isolate* isolate = v8::Isolate::GetCurrent(); + return Call(isolate->GetCurrentContext()->Global(), argc, argv, resource); +#elif NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape( + Call_(isolate, isolate->GetCurrentContext()->Global(), argc, argv)); +#else + v8::HandleScope scope; + return scope.Close(Call_(v8::Context::GetCurrent()->Global(), argc, argv)); +#endif + } + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(Callback) + Persistent handle_; + +#if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + MaybeLocal Call_(v8::Isolate *isolate + , v8::Local target + , int argc + , v8::Local argv[] + , AsyncResource* resource) const { + EscapableHandleScope scope; + v8::Local func = New(handle_); + auto maybe = resource->runInAsyncScope(target, func, argc, argv); + v8::Local local; + if (!maybe.ToLocal(&local)) return MaybeLocal(); + return scope.Escape(local); + } +#elif NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + v8::Local Call_(v8::Isolate *isolate + , v8::Local target + , int argc + , v8::Local argv[]) const { + EscapableHandleScope scope; + + v8::Local callback = New(handle_); +# if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION + return scope.Escape(New(node::MakeCallback( + isolate + , target + , callback + , argc + , argv + ))); +# else + return scope.Escape(node::MakeCallback( + isolate + , target + , callback + , argc + , argv + )); +# endif + } +#else + v8::Local Call_(v8::Local target + , int argc + , v8::Local argv[]) const { + EscapableHandleScope scope; + + v8::Local callback = New(handle_); + return scope.Escape(New(node::MakeCallback( + target + , callback + , argc + , argv + ))); + } +#endif +}; + +inline MaybeLocal Call( + const Nan::Callback& callback + , v8::Local recv + , int argc + , v8::Local argv[]) { + return Call(*callback, recv, argc, argv); +} + +inline MaybeLocal Call( + const Nan::Callback& callback + , int argc + , v8::Local argv[]) { +#if NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + v8::Isolate* isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape( + 
Call(*callback, isolate->GetCurrentContext()->Global(), argc, argv) + .FromMaybe(v8::Local())); +#else + EscapableHandleScope scope; + return scope.Escape( + Call(*callback, v8::Context::GetCurrent()->Global(), argc, argv) + .FromMaybe(v8::Local())); +#endif +} + +inline MaybeLocal Call( + v8::Local symbol + , v8::Local recv + , int argc + , v8::Local argv[]) { + EscapableHandleScope scope; + v8::Local fn_v = + Get(recv, symbol).FromMaybe(v8::Local()); + if (fn_v.IsEmpty() || !fn_v->IsFunction()) return v8::Local(); + v8::Local fn = fn_v.As(); + return scope.Escape( + Call(fn, recv, argc, argv).FromMaybe(v8::Local())); +} + +inline MaybeLocal Call( + const char* method + , v8::Local recv + , int argc + , v8::Local argv[]) { + EscapableHandleScope scope; + v8::Local method_string = + New(method).ToLocalChecked(); + return scope.Escape( + Call(method_string, recv, argc, argv).FromMaybe(v8::Local())); +} + +/* abstract */ class AsyncWorker { + public: + explicit AsyncWorker(Callback *callback_, + const char* resource_name = "nan:AsyncWorker") + : callback(callback_), errmsg_(NULL) { + request.data = this; + + HandleScope scope; + v8::Local obj = New(); + persistentHandle.Reset(obj); + async_resource = new AsyncResource(resource_name, obj); + } + + virtual ~AsyncWorker() { + HandleScope scope; + + if (!persistentHandle.IsEmpty()) + persistentHandle.Reset(); + delete callback; + delete[] errmsg_; + delete async_resource; + } + + virtual void WorkComplete() { + HandleScope scope; + + if (errmsg_ == NULL) + HandleOKCallback(); + else + HandleErrorCallback(); + delete callback; + callback = NULL; + } + + inline void SaveToPersistent( + const char *key, const v8::Local &value) { + HandleScope scope; + Set(New(persistentHandle), New(key).ToLocalChecked(), value).FromJust(); + } + + inline void SaveToPersistent( + const v8::Local &key, const v8::Local &value) { + HandleScope scope; + Set(New(persistentHandle), key, value).FromJust(); + } + + inline void SaveToPersistent( + uint32_t index, const v8::Local &value) { + HandleScope scope; + Set(New(persistentHandle), index, value).FromJust(); + } + + inline v8::Local GetFromPersistent(const char *key) const { + EscapableHandleScope scope; + return scope.Escape( + Get(New(persistentHandle), New(key).ToLocalChecked()) + .FromMaybe(v8::Local())); + } + + inline v8::Local + GetFromPersistent(const v8::Local &key) const { + EscapableHandleScope scope; + return scope.Escape( + Get(New(persistentHandle), key) + .FromMaybe(v8::Local())); + } + + inline v8::Local GetFromPersistent(uint32_t index) const { + EscapableHandleScope scope; + return scope.Escape( + Get(New(persistentHandle), index) + .FromMaybe(v8::Local())); + } + + virtual void Execute() = 0; + + uv_work_t request; + + virtual void Destroy() { + delete this; + } + + protected: + Persistent persistentHandle; + Callback *callback; + AsyncResource *async_resource; + + virtual void HandleOKCallback() { + HandleScope scope; + + callback->Call(0, NULL, async_resource); + } + + virtual void HandleErrorCallback() { + HandleScope scope; + + v8::Local argv[] = { + v8::Exception::Error(New(ErrorMessage()).ToLocalChecked()) + }; + callback->Call(1, argv, async_resource); + } + + void SetErrorMessage(const char *msg) { + delete[] errmsg_; + + size_t size = strlen(msg) + 1; + errmsg_ = new char[size]; + memcpy(errmsg_, msg, size); + } + + const char* ErrorMessage() const { + return errmsg_; + } + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(AsyncWorker) + char *errmsg_; +}; + +/* abstract */ class 
AsyncBareProgressWorkerBase : public AsyncWorker { + public: + explicit AsyncBareProgressWorkerBase( + Callback *callback_, + const char* resource_name = "nan:AsyncBareProgressWorkerBase") + : AsyncWorker(callback_, resource_name) { + uv_async_init( + GetCurrentEventLoop() + , &async + , AsyncProgress_ + ); + async.data = this; + } + + virtual ~AsyncBareProgressWorkerBase() { + } + + virtual void WorkProgress() = 0; + + virtual void Destroy() { + uv_close(reinterpret_cast(&async), AsyncClose_); + } + + private: + inline static NAUV_WORK_CB(AsyncProgress_) { + AsyncBareProgressWorkerBase *worker = + static_cast(async->data); + worker->WorkProgress(); + } + + inline static void AsyncClose_(uv_handle_t* handle) { + AsyncBareProgressWorkerBase *worker = + static_cast(handle->data); + delete worker; + } + + protected: + uv_async_t async; +}; + +template +/* abstract */ +class AsyncBareProgressWorker : public AsyncBareProgressWorkerBase { + public: + explicit AsyncBareProgressWorker( + Callback *callback_, + const char* resource_name = "nan:AsyncBareProgressWorker") + : AsyncBareProgressWorkerBase(callback_, resource_name) { + uv_mutex_init(&async_lock); + } + + virtual ~AsyncBareProgressWorker() { + uv_mutex_destroy(&async_lock); + } + + class ExecutionProgress { + friend class AsyncBareProgressWorker; + public: + void Signal() const { + uv_mutex_lock(&that_->async_lock); + uv_async_send(&that_->async); + uv_mutex_unlock(&that_->async_lock); + } + + void Send(const T* data, size_t count) const { + that_->SendProgress_(data, count); + } + + private: + explicit ExecutionProgress(AsyncBareProgressWorker *that) : that_(that) {} + NAN_DISALLOW_ASSIGN_COPY_MOVE(ExecutionProgress) + AsyncBareProgressWorker* const that_; + }; + + virtual void Execute(const ExecutionProgress& progress) = 0; + virtual void HandleProgressCallback(const T *data, size_t size) = 0; + + protected: + uv_mutex_t async_lock; + + private: + void Execute() /*final override*/ { + ExecutionProgress progress(this); + Execute(progress); + } + + virtual void SendProgress_(const T *data, size_t count) = 0; +}; + +template +/* abstract */ +class AsyncProgressWorkerBase : public AsyncBareProgressWorker { + public: + explicit AsyncProgressWorkerBase( + Callback *callback_, + const char* resource_name = "nan:AsyncProgressWorkerBase") + : AsyncBareProgressWorker(callback_, resource_name), asyncdata_(NULL), + asyncsize_(0) { + } + + virtual ~AsyncProgressWorkerBase() { + delete[] asyncdata_; + } + + void WorkProgress() { + uv_mutex_lock(&this->async_lock); + T *data = asyncdata_; + size_t size = asyncsize_; + asyncdata_ = NULL; + asyncsize_ = 0; + uv_mutex_unlock(&this->async_lock); + + // Don't send progress events after we've already completed. + if (this->callback) { + this->HandleProgressCallback(data, size); + } + delete[] data; + } + + private: + void SendProgress_(const T *data, size_t count) { + T *new_data = new T[count]; + { + T *it = new_data; + std::copy(data, data + count, it); + } + + uv_mutex_lock(&this->async_lock); + T *old_data = asyncdata_; + asyncdata_ = new_data; + asyncsize_ = count; + uv_async_send(&this->async); + uv_mutex_unlock(&this->async_lock); + + delete[] old_data; + } + + T *asyncdata_; + size_t asyncsize_; +}; + +// This ensures compatibility to the previous un-templated AsyncProgressWorker +// class definition. 
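Editorial aside: the typedef on the next line keeps the historical, un-templated name working, i.e. AsyncProgressWorker is AsyncProgressWorkerBase<char> (the template argument has been lost in this rendering of the patch). As an illustration of how these worker classes are subclassed, a hedged sketch follows; the class name CountWorker and the two callbacks are invented for the example and are not part of this vendored patch.

#include <nan.h>

// Runs Execute() on the libuv thread pool and streams progress values back to
// the JS thread, where HandleProgressCallback() is invoked.
class CountWorker : public Nan::AsyncProgressWorker {
 public:
  CountWorker(Nan::Callback *done, Nan::Callback *progress)
      : Nan::AsyncProgressWorker(done), progress_(progress) {}
  ~CountWorker() { delete progress_; }

  void Execute(const ExecutionProgress &progress) {
    for (char i = 0; i < 5; ++i)
      progress.Send(&i, 1);            // copied, then delivered on the JS thread
  }

  void HandleProgressCallback(const char *data, size_t size) {
    Nan::HandleScope scope;
    v8::Local<v8::Value> argv[] = { Nan::New<v8::Number>(data[0]) };
    progress_->Call(1, argv, async_resource);
  }

 private:
  Nan::Callback *progress_;
};

// Typically queued with: Nan::AsyncQueueWorker(new CountWorker(done, progress));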
+typedef AsyncProgressWorkerBase AsyncProgressWorker; + +template +/* abstract */ +class AsyncBareProgressQueueWorker : public AsyncBareProgressWorkerBase { + public: + explicit AsyncBareProgressQueueWorker( + Callback *callback_, + const char* resource_name = "nan:AsyncBareProgressQueueWorker") + : AsyncBareProgressWorkerBase(callback_, resource_name) { + } + + virtual ~AsyncBareProgressQueueWorker() { + } + + class ExecutionProgress { + friend class AsyncBareProgressQueueWorker; + public: + void Send(const T* data, size_t count) const { + that_->SendProgress_(data, count); + } + + private: + explicit ExecutionProgress(AsyncBareProgressQueueWorker *that) + : that_(that) {} + NAN_DISALLOW_ASSIGN_COPY_MOVE(ExecutionProgress) + AsyncBareProgressQueueWorker* const that_; + }; + + virtual void Execute(const ExecutionProgress& progress) = 0; + virtual void HandleProgressCallback(const T *data, size_t size) = 0; + + private: + void Execute() /*final override*/ { + ExecutionProgress progress(this); + Execute(progress); + } + + virtual void SendProgress_(const T *data, size_t count) = 0; +}; + +template +/* abstract */ +class AsyncProgressQueueWorker : public AsyncBareProgressQueueWorker { + public: + explicit AsyncProgressQueueWorker( + Callback *callback_, + const char* resource_name = "nan:AsyncProgressQueueWorker") + : AsyncBareProgressQueueWorker(callback_) { + uv_mutex_init(&async_lock); + } + + virtual ~AsyncProgressQueueWorker() { + uv_mutex_lock(&async_lock); + + while (!asyncdata_.empty()) { + std::pair &datapair = asyncdata_.front(); + T *data = datapair.first; + + asyncdata_.pop(); + + delete[] data; + } + + uv_mutex_unlock(&async_lock); + uv_mutex_destroy(&async_lock); + } + + void WorkComplete() { + WorkProgress(); + AsyncWorker::WorkComplete(); + } + + void WorkProgress() { + uv_mutex_lock(&async_lock); + + while (!asyncdata_.empty()) { + std::pair &datapair = asyncdata_.front(); + + T *data = datapair.first; + size_t size = datapair.second; + + asyncdata_.pop(); + uv_mutex_unlock(&async_lock); + + // Don't send progress events after we've already completed. 
+ if (this->callback) { + this->HandleProgressCallback(data, size); + } + + delete[] data; + + uv_mutex_lock(&async_lock); + } + + uv_mutex_unlock(&async_lock); + } + + private: + void SendProgress_(const T *data, size_t count) { + T *new_data = new T[count]; + { + T *it = new_data; + std::copy(data, data + count, it); + } + + uv_mutex_lock(&async_lock); + asyncdata_.push(std::pair(new_data, count)); + uv_mutex_unlock(&async_lock); + + uv_async_send(&this->async); + } + + uv_mutex_t async_lock; + std::queue > asyncdata_; +}; + +inline void AsyncExecute (uv_work_t* req) { + AsyncWorker *worker = static_cast(req->data); + worker->Execute(); +} + +inline void AsyncExecuteComplete (uv_work_t* req) { + AsyncWorker* worker = static_cast(req->data); + worker->WorkComplete(); + worker->Destroy(); +} + +inline void AsyncQueueWorker (AsyncWorker* worker) { + uv_queue_work( + GetCurrentEventLoop() + , &worker->request + , AsyncExecute + , reinterpret_cast(AsyncExecuteComplete) + ); +} + +namespace imp { + +inline +ExternalOneByteStringResource const* +GetExternalResource(v8::Local str) { +#if NODE_MODULE_VERSION < ATOM_0_21_MODULE_VERSION + return str->GetExternalAsciiStringResource(); +#else + return str->GetExternalOneByteStringResource(); +#endif +} + +inline +bool +IsExternal(v8::Local str) { +#if NODE_MODULE_VERSION < ATOM_0_21_MODULE_VERSION + return str->IsExternalAscii(); +#else + return str->IsExternalOneByte(); +#endif +} + +} // end of namespace imp + +enum Encoding {ASCII, UTF8, BASE64, UCS2, BINARY, HEX, BUFFER}; + +#if NODE_MODULE_VERSION < NODE_0_10_MODULE_VERSION +# include "nan_string_bytes.h" // NOLINT(build/include) +#endif + +inline v8::Local Encode( + const void *buf, size_t len, enum Encoding encoding = BINARY) { +#if (NODE_MODULE_VERSION >= ATOM_0_21_MODULE_VERSION) + v8::Isolate* isolate = v8::Isolate::GetCurrent(); + node::encoding node_enc = static_cast(encoding); + + if (encoding == UCS2) { + return node::Encode( + isolate + , reinterpret_cast(buf) + , len / 2); + } else { + return node::Encode( + isolate + , reinterpret_cast(buf) + , len + , node_enc); + } +#elif (NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION) + return node::Encode( + v8::Isolate::GetCurrent() + , buf, len + , static_cast(encoding)); +#else +# if NODE_MODULE_VERSION >= NODE_0_10_MODULE_VERSION + return node::Encode(buf, len, static_cast(encoding)); +# else + return imp::Encode(reinterpret_cast(buf), len, encoding); +# endif +#endif +} + +inline ssize_t DecodeBytes( + v8::Local val, enum Encoding encoding = BINARY) { +#if (NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION) + return node::DecodeBytes( + v8::Isolate::GetCurrent() + , val + , static_cast(encoding)); +#else +# if (NODE_MODULE_VERSION < NODE_0_10_MODULE_VERSION) + if (encoding == BUFFER) { + return node::DecodeBytes(val, node::BINARY); + } +# endif + return node::DecodeBytes(val, static_cast(encoding)); +#endif +} + +inline ssize_t DecodeWrite( + char *buf + , size_t len + , v8::Local val + , enum Encoding encoding = BINARY) { +#if (NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION) + return node::DecodeWrite( + v8::Isolate::GetCurrent() + , buf + , len + , val + , static_cast(encoding)); +#else +# if (NODE_MODULE_VERSION < NODE_0_10_MODULE_VERSION) + if (encoding == BUFFER) { + return node::DecodeWrite(buf, len, val, node::BINARY); + } +# endif + return node::DecodeWrite( + buf + , len + , val + , static_cast(encoding)); +#endif +} + +inline void SetPrototypeTemplate( + v8::Local templ + , const char *name + , v8::Local value +) { + HandleScope 
scope; + SetTemplate(templ->PrototypeTemplate(), name, value); +} + +inline void SetPrototypeTemplate( + v8::Local templ + , v8::Local name + , v8::Local value + , v8::PropertyAttribute attributes +) { + HandleScope scope; + SetTemplate(templ->PrototypeTemplate(), name, value, attributes); +} + +inline void SetInstanceTemplate( + v8::Local templ + , const char *name + , v8::Local value +) { + HandleScope scope; + SetTemplate(templ->InstanceTemplate(), name, value); +} + +inline void SetInstanceTemplate( + v8::Local templ + , v8::Local name + , v8::Local value + , v8::PropertyAttribute attributes +) { + HandleScope scope; + SetTemplate(templ->InstanceTemplate(), name, value, attributes); +} + +namespace imp { + +// Note(@agnat): Helper to distinguish different receiver types. The first +// version deals with receivers derived from v8::Template. The second version +// handles everything else. The final argument only serves as discriminator and +// is unused. +template +inline +void +SetMethodAux(T recv, + v8::Local name, + v8::Local tpl, + v8::Template *) { + recv->Set(name, tpl); +} + +template +inline +void +SetMethodAux(T recv, + v8::Local name, + v8::Local tpl, + ...) { + Set(recv, name, GetFunction(tpl).ToLocalChecked()); +} + +} // end of namespace imp + +template class HandleType> +inline void SetMethod( + HandleType recv + , const char *name + , FunctionCallback callback) { + HandleScope scope; + v8::Local t = New(callback); + v8::Local fn_name = New(name).ToLocalChecked(); + t->SetClassName(fn_name); + // Note(@agnat): Pass an empty T* as discriminator. See note on + // SetMethodAux(...) above + imp::SetMethodAux(recv, fn_name, t, static_cast(0)); +} + +inline void SetPrototypeMethod( + v8::Local recv + , const char* name, FunctionCallback callback) { + HandleScope scope; + v8::Local t = New( + callback + , v8::Local() + , New(recv)); + v8::Local fn_name = New(name).ToLocalChecked(); + recv->PrototypeTemplate()->Set(fn_name, t); + t->SetClassName(fn_name); +} + +//=== Accessors and Such ======================================================= + +inline void SetAccessor( + v8::Local tpl + , v8::Local name + , GetterCallback getter + , SetterCallback setter = 0 + , v8::Local data = v8::Local() + , v8::AccessControl settings = v8::DEFAULT + , v8::PropertyAttribute attribute = v8::None + , imp::Sig signature = imp::Sig()) { + HandleScope scope; + + imp::NativeGetter getter_ = + imp::GetterCallbackWrapper; + imp::NativeSetter setter_ = + setter ? imp::SetterCallbackWrapper : 0; + + v8::Local otpl = New(); + otpl->SetInternalFieldCount(imp::kAccessorFieldCount); + v8::Local obj = NewInstance(otpl).ToLocalChecked(); + + obj->SetInternalField( + imp::kGetterIndex + , New(reinterpret_cast(getter))); + + if (setter != 0) { + obj->SetInternalField( + imp::kSetterIndex + , New(reinterpret_cast(setter))); + } + + if (!data.IsEmpty()) { + obj->SetInternalField(imp::kDataIndex, data); + } + + tpl->SetAccessor( + name + , getter_ + , setter_ + , obj + , settings + , attribute + , signature); +} + +inline bool SetAccessor( + v8::Local obj + , v8::Local name + , GetterCallback getter + , SetterCallback setter = 0 + , v8::Local data = v8::Local() + , v8::AccessControl settings = v8::DEFAULT + , v8::PropertyAttribute attribute = v8::None) { + HandleScope scope; + + imp::NativeGetter getter_ = + imp::GetterCallbackWrapper; + imp::NativeSetter setter_ = + setter ? 
imp::SetterCallbackWrapper : 0; + + v8::Local otpl = New(); + otpl->SetInternalFieldCount(imp::kAccessorFieldCount); + v8::Local dataobj = NewInstance(otpl).ToLocalChecked(); + + dataobj->SetInternalField( + imp::kGetterIndex + , New(reinterpret_cast(getter))); + + if (!data.IsEmpty()) { + dataobj->SetInternalField(imp::kDataIndex, data); + } + + if (setter) { + dataobj->SetInternalField( + imp::kSetterIndex + , New(reinterpret_cast(setter))); + } + +#if (NODE_MODULE_VERSION >= NODE_6_0_MODULE_VERSION) + return obj->SetAccessor( + GetCurrentContext() + , name + , getter_ + , setter_ + , dataobj + , settings + , attribute).FromMaybe(false); +#else + return obj->SetAccessor( + name + , getter_ + , setter_ + , dataobj + , settings + , attribute); +#endif +} + +inline void SetNamedPropertyHandler( + v8::Local tpl + , PropertyGetterCallback getter + , PropertySetterCallback setter = 0 + , PropertyQueryCallback query = 0 + , PropertyDeleterCallback deleter = 0 + , PropertyEnumeratorCallback enumerator = 0 + , v8::Local data = v8::Local()) { + HandleScope scope; + + imp::NativePropertyGetter getter_ = + imp::PropertyGetterCallbackWrapper; + imp::NativePropertySetter setter_ = + setter ? imp::PropertySetterCallbackWrapper : 0; + imp::NativePropertyQuery query_ = + query ? imp::PropertyQueryCallbackWrapper : 0; + imp::NativePropertyDeleter *deleter_ = + deleter ? imp::PropertyDeleterCallbackWrapper : 0; + imp::NativePropertyEnumerator enumerator_ = + enumerator ? imp::PropertyEnumeratorCallbackWrapper : 0; + + v8::Local otpl = New(); + otpl->SetInternalFieldCount(imp::kPropertyFieldCount); + v8::Local obj = NewInstance(otpl).ToLocalChecked(); + obj->SetInternalField( + imp::kPropertyGetterIndex + , New(reinterpret_cast(getter))); + + if (setter) { + obj->SetInternalField( + imp::kPropertySetterIndex + , New(reinterpret_cast(setter))); + } + + if (query) { + obj->SetInternalField( + imp::kPropertyQueryIndex + , New(reinterpret_cast(query))); + } + + if (deleter) { + obj->SetInternalField( + imp::kPropertyDeleterIndex + , New(reinterpret_cast(deleter))); + } + + if (enumerator) { + obj->SetInternalField( + imp::kPropertyEnumeratorIndex + , New(reinterpret_cast(enumerator))); + } + + if (!data.IsEmpty()) { + obj->SetInternalField(imp::kDataIndex, data); + } + +#if NODE_MODULE_VERSION > NODE_0_12_MODULE_VERSION + tpl->SetHandler(v8::NamedPropertyHandlerConfiguration( + getter_, setter_, query_, deleter_, enumerator_, obj)); +#else + tpl->SetNamedPropertyHandler( + getter_ + , setter_ + , query_ + , deleter_ + , enumerator_ + , obj); +#endif +} + +inline void SetIndexedPropertyHandler( + v8::Local tpl + , IndexGetterCallback getter + , IndexSetterCallback setter = 0 + , IndexQueryCallback query = 0 + , IndexDeleterCallback deleter = 0 + , IndexEnumeratorCallback enumerator = 0 + , v8::Local data = v8::Local()) { + HandleScope scope; + + imp::NativeIndexGetter getter_ = + imp::IndexGetterCallbackWrapper; + imp::NativeIndexSetter setter_ = + setter ? imp::IndexSetterCallbackWrapper : 0; + imp::NativeIndexQuery query_ = + query ? imp::IndexQueryCallbackWrapper : 0; + imp::NativeIndexDeleter deleter_ = + deleter ? imp::IndexDeleterCallbackWrapper : 0; + imp::NativeIndexEnumerator enumerator_ = + enumerator ? 
imp::IndexEnumeratorCallbackWrapper : 0; + + v8::Local otpl = New(); + otpl->SetInternalFieldCount(imp::kIndexPropertyFieldCount); + v8::Local obj = NewInstance(otpl).ToLocalChecked(); + obj->SetInternalField( + imp::kIndexPropertyGetterIndex + , New(reinterpret_cast(getter))); + + if (setter) { + obj->SetInternalField( + imp::kIndexPropertySetterIndex + , New(reinterpret_cast(setter))); + } + + if (query) { + obj->SetInternalField( + imp::kIndexPropertyQueryIndex + , New(reinterpret_cast(query))); + } + + if (deleter) { + obj->SetInternalField( + imp::kIndexPropertyDeleterIndex + , New(reinterpret_cast(deleter))); + } + + if (enumerator) { + obj->SetInternalField( + imp::kIndexPropertyEnumeratorIndex + , New(reinterpret_cast(enumerator))); + } + + if (!data.IsEmpty()) { + obj->SetInternalField(imp::kDataIndex, data); + } + +#if NODE_MODULE_VERSION > NODE_0_12_MODULE_VERSION + tpl->SetHandler(v8::IndexedPropertyHandlerConfiguration( + getter_, setter_, query_, deleter_, enumerator_, obj)); +#else + tpl->SetIndexedPropertyHandler( + getter_ + , setter_ + , query_ + , deleter_ + , enumerator_ + , obj); +#endif +} + +inline void SetCallHandler( + v8::Local tpl + , FunctionCallback callback + , v8::Local data = v8::Local()) { + HandleScope scope; + + v8::Local otpl = New(); + otpl->SetInternalFieldCount(imp::kFunctionFieldCount); + v8::Local obj = NewInstance(otpl).ToLocalChecked(); + + obj->SetInternalField( + imp::kFunctionIndex + , New(reinterpret_cast(callback))); + + if (!data.IsEmpty()) { + obj->SetInternalField(imp::kDataIndex, data); + } + + tpl->SetCallHandler(imp::FunctionCallbackWrapper, obj); +} + + +inline void SetCallAsFunctionHandler( + v8::Local tpl, + FunctionCallback callback, + v8::Local data = v8::Local()) { + HandleScope scope; + + v8::Local otpl = New(); + otpl->SetInternalFieldCount(imp::kFunctionFieldCount); + v8::Local obj = NewInstance(otpl).ToLocalChecked(); + + obj->SetInternalField( + imp::kFunctionIndex + , New(reinterpret_cast(callback))); + + if (!data.IsEmpty()) { + obj->SetInternalField(imp::kDataIndex, data); + } + + tpl->SetCallAsFunctionHandler(imp::FunctionCallbackWrapper, obj); +} + +//=== Weak Persistent Handling ================================================= + +#include "nan_weak.h" // NOLINT(build/include) + +//=== ObjectWrap =============================================================== + +#include "nan_object_wrap.h" // NOLINT(build/include) + +//=== HiddenValue/Private ====================================================== + +#include "nan_private.h" // NOLINT(build/include) + +//=== Export ================================================================== + +inline +void +Export(ADDON_REGISTER_FUNCTION_ARGS_TYPE target, const char *name, + FunctionCallback f) { + HandleScope scope; + + Set(target, New(name).ToLocalChecked(), + GetFunction(New(f)).ToLocalChecked()); +} + +//=== Tap Reverse Binding ===================================================== + +struct Tap { + explicit Tap(v8::Local t) : t_() { + HandleScope scope; + + t_.Reset(To(t).ToLocalChecked()); + } + + ~Tap() { t_.Reset(); } // not sure if neccessary + + inline void plan(int i) { + HandleScope scope; + v8::Local arg = New(i); + Call("plan", New(t_), 1, &arg); + } + + inline void ok(bool isOk, const char *msg = NULL) { + HandleScope scope; + v8::Local args[2]; + args[0] = New(isOk); + if (msg) args[1] = New(msg).ToLocalChecked(); + Call("ok", New(t_), msg ? 
2 : 1, args); + } + + inline void pass(const char * msg = NULL) { + HandleScope scope; + v8::Local hmsg; + if (msg) hmsg = New(msg).ToLocalChecked(); + Call("pass", New(t_), msg ? 1 : 0, &hmsg); + } + + inline void end() { + HandleScope scope; + Call("end", New(t_), 0, NULL); + } + + private: + Persistent t_; +}; + +#define NAN_STRINGIZE2(x) #x +#define NAN_STRINGIZE(x) NAN_STRINGIZE2(x) +#define NAN_TEST_EXPRESSION(expression) \ + ( expression ), __FILE__ ":" NAN_STRINGIZE(__LINE__) ": " #expression + +#define NAN_EXPORT(target, function) Export(target, #function, function) + +#undef TYPE_CHECK + +//=== Generic Maybefication =================================================== + +namespace imp { + +template struct Maybefier; + +template struct Maybefier > { + inline static MaybeLocal convert(v8::Local v) { + return v; + } +}; + +template struct Maybefier > { + inline static MaybeLocal convert(MaybeLocal v) { + return v; + } +}; + +} // end of namespace imp + +template class MaybeMaybe> +inline MaybeLocal +MakeMaybe(MaybeMaybe v) { + return imp::Maybefier >::convert(v); +} + +//=== TypedArrayContents ======================================================= + +#include "nan_typedarray_contents.h" // NOLINT(build/include) + +//=== JSON ===================================================================== + +#include "nan_json.h" // NOLINT(build/include) + +} // end of namespace Nan + +#endif // NAN_H_ diff --git a/node_modules/nan/nan_callbacks.h b/node_modules/nan/nan_callbacks.h new file mode 100644 index 00000000..53ede846 --- /dev/null +++ b/node_modules/nan/nan_callbacks.h @@ -0,0 +1,88 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_CALLBACKS_H_ +#define NAN_CALLBACKS_H_ + +template class FunctionCallbackInfo; +template class PropertyCallbackInfo; +template class Global; + +typedef void(*FunctionCallback)(const FunctionCallbackInfo&); +typedef void(*GetterCallback) + (v8::Local, const PropertyCallbackInfo&); +typedef void(*SetterCallback)( + v8::Local, + v8::Local, + const PropertyCallbackInfo&); +typedef void(*PropertyGetterCallback)( + v8::Local, + const PropertyCallbackInfo&); +typedef void(*PropertySetterCallback)( + v8::Local, + v8::Local, + const PropertyCallbackInfo&); +typedef void(*PropertyEnumeratorCallback) + (const PropertyCallbackInfo&); +typedef void(*PropertyDeleterCallback)( + v8::Local, + const PropertyCallbackInfo&); +typedef void(*PropertyQueryCallback)( + v8::Local, + const PropertyCallbackInfo&); +typedef void(*IndexGetterCallback)( + uint32_t, + const PropertyCallbackInfo&); +typedef void(*IndexSetterCallback)( + uint32_t, + v8::Local, + const PropertyCallbackInfo&); +typedef void(*IndexEnumeratorCallback) + (const PropertyCallbackInfo&); +typedef void(*IndexDeleterCallback)( + uint32_t, + const PropertyCallbackInfo&); +typedef void(*IndexQueryCallback)( + uint32_t, + const PropertyCallbackInfo&); + +namespace imp { +typedef v8::Local Sig; + +static const int kDataIndex = 0; + +static const int kFunctionIndex = 1; +static const int kFunctionFieldCount = 2; + +static const int kGetterIndex = 1; +static const int kSetterIndex = 2; +static const int kAccessorFieldCount = 3; + +static const int kPropertyGetterIndex = 1; +static const int kPropertySetterIndex = 2; +static const int kPropertyEnumeratorIndex = 3; +static const int kPropertyDeleterIndex = 4; 
+static const int kPropertyQueryIndex = 5; +static const int kPropertyFieldCount = 6; + +static const int kIndexPropertyGetterIndex = 1; +static const int kIndexPropertySetterIndex = 2; +static const int kIndexPropertyEnumeratorIndex = 3; +static const int kIndexPropertyDeleterIndex = 4; +static const int kIndexPropertyQueryIndex = 5; +static const int kIndexPropertyFieldCount = 6; + +} // end of namespace imp + +#if NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION +# include "nan_callbacks_12_inl.h" // NOLINT(build/include) +#else +# include "nan_callbacks_pre_12_inl.h" // NOLINT(build/include) +#endif + +#endif // NAN_CALLBACKS_H_ diff --git a/node_modules/nan/nan_callbacks_12_inl.h b/node_modules/nan/nan_callbacks_12_inl.h new file mode 100644 index 00000000..c27b18d8 --- /dev/null +++ b/node_modules/nan/nan_callbacks_12_inl.h @@ -0,0 +1,514 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_CALLBACKS_12_INL_H_ +#define NAN_CALLBACKS_12_INL_H_ + +template +class ReturnValue { + v8::ReturnValue value_; + + public: + template + explicit inline ReturnValue(const v8::ReturnValue &value) : + value_(value) {} + template + explicit inline ReturnValue(const ReturnValue& that) + : value_(that.value_) { + TYPE_CHECK(T, S); + } + + // Handle setters + template inline void Set(const v8::Local &handle) { + TYPE_CHECK(T, S); + value_.Set(handle); + } + + template inline void Set(const Global &handle) { + TYPE_CHECK(T, S); +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && \ + (V8_MINOR_VERSION > 5 || (V8_MINOR_VERSION == 5 && \ + defined(V8_BUILD_NUMBER) && V8_BUILD_NUMBER >= 8)))) + value_.Set(handle); +#else + value_.Set(*reinterpret_cast*>(&handle)); + const_cast &>(handle).Reset(); +#endif + } + + // Fast primitive setters + inline void Set(bool value) { + TYPE_CHECK(T, v8::Boolean); + value_.Set(value); + } + + inline void Set(double i) { + TYPE_CHECK(T, v8::Number); + value_.Set(i); + } + + inline void Set(int32_t i) { + TYPE_CHECK(T, v8::Integer); + value_.Set(i); + } + + inline void Set(uint32_t i) { + TYPE_CHECK(T, v8::Integer); + value_.Set(i); + } + + // Fast JS primitive setters + inline void SetNull() { + TYPE_CHECK(T, v8::Primitive); + value_.SetNull(); + } + + inline void SetUndefined() { + TYPE_CHECK(T, v8::Primitive); + value_.SetUndefined(); + } + + inline void SetEmptyString() { + TYPE_CHECK(T, v8::String); + value_.SetEmptyString(); + } + + // Convenience getter for isolate + inline v8::Isolate *GetIsolate() const { + return value_.GetIsolate(); + } + + // Pointer setter: Uncompilable to prevent inadvertent misuse. 
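Editorial aside: the template overload that follows this comment exists only so that accidentally returning a raw pointer fails to compile, as the original comment says. For context, a short sketch of how ReturnValue is reached from addon code; the method name Answer is illustrative and not part of this vendored patch.

#include <nan.h>

// Typical ways a NAN method hands a value back to JS through Nan::ReturnValue.
NAN_METHOD(Answer) {
  info.GetReturnValue().Set(42);                                   // int32_t overload
  // info.GetReturnValue().Set(Nan::New("text").ToLocalChecked()); // Local<T> overload
  // info.GetReturnValue().SetUndefined();                         // JS primitive setter
}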
+ template + inline void Set(S *whatever) { TYPE_CHECK(S*, v8::Primitive); } +}; + +template +class FunctionCallbackInfo { + const v8::FunctionCallbackInfo &info_; + const v8::Local data_; + + public: + explicit inline FunctionCallbackInfo( + const v8::FunctionCallbackInfo &info + , v8::Local data) : + info_(info) + , data_(data) {} + + inline ReturnValue GetReturnValue() const { + return ReturnValue(info_.GetReturnValue()); + } + +#if NODE_MAJOR_VERSION < 10 + inline v8::Local Callee() const { return info_.Callee(); } +#endif + inline v8::Local Data() const { return data_; } + inline v8::Local Holder() const { return info_.Holder(); } + inline bool IsConstructCall() const { return info_.IsConstructCall(); } + inline int Length() const { return info_.Length(); } + inline v8::Local operator[](int i) const { return info_[i]; } + inline v8::Local This() const { return info_.This(); } + inline v8::Isolate *GetIsolate() const { return info_.GetIsolate(); } + + + protected: + static const int kHolderIndex = 0; + static const int kIsolateIndex = 1; + static const int kReturnValueDefaultValueIndex = 2; + static const int kReturnValueIndex = 3; + static const int kDataIndex = 4; + static const int kCalleeIndex = 5; + static const int kContextSaveIndex = 6; + static const int kArgsLength = 7; + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(FunctionCallbackInfo) +}; + +template +class PropertyCallbackInfo { + const v8::PropertyCallbackInfo &info_; + const v8::Local data_; + + public: + explicit inline PropertyCallbackInfo( + const v8::PropertyCallbackInfo &info + , const v8::Local data) : + info_(info) + , data_(data) {} + + inline v8::Isolate* GetIsolate() const { return info_.GetIsolate(); } + inline v8::Local Data() const { return data_; } + inline v8::Local This() const { return info_.This(); } + inline v8::Local Holder() const { return info_.Holder(); } + inline ReturnValue GetReturnValue() const { + return ReturnValue(info_.GetReturnValue()); + } + + protected: + static const int kHolderIndex = 0; + static const int kIsolateIndex = 1; + static const int kReturnValueDefaultValueIndex = 2; + static const int kReturnValueIndex = 3; + static const int kDataIndex = 4; + static const int kThisIndex = 5; + static const int kArgsLength = 6; + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(PropertyCallbackInfo) +}; + +namespace imp { +static +void FunctionCallbackWrapper(const v8::FunctionCallbackInfo &info) { + v8::Local obj = info.Data().As(); + FunctionCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kFunctionIndex).As()->Value())); + FunctionCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + callback(cbinfo); +} + +typedef void (*NativeFunction)(const v8::FunctionCallbackInfo &); + +#if NODE_MODULE_VERSION > NODE_0_12_MODULE_VERSION +static +void GetterCallbackWrapper( + v8::Local property + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + GetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kGetterIndex).As()->Value())); + callback(property.As(), cbinfo); +} + +typedef void (*NativeGetter) + (v8::Local, const v8::PropertyCallbackInfo &); + +static +void SetterCallbackWrapper( + v8::Local property + , v8::Local value + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + SetterCallback callback = reinterpret_cast( + 
reinterpret_cast( + obj->GetInternalField(kSetterIndex).As()->Value())); + callback(property.As(), value, cbinfo); +} + +typedef void (*NativeSetter)( + v8::Local + , v8::Local + , const v8::PropertyCallbackInfo &); +#else +static +void GetterCallbackWrapper( + v8::Local property + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + GetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kGetterIndex).As()->Value())); + callback(property, cbinfo); +} + +typedef void (*NativeGetter) + (v8::Local, const v8::PropertyCallbackInfo &); + +static +void SetterCallbackWrapper( + v8::Local property + , v8::Local value + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + SetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kSetterIndex).As()->Value())); + callback(property, value, cbinfo); +} + +typedef void (*NativeSetter)( + v8::Local + , v8::Local + , const v8::PropertyCallbackInfo &); +#endif + +#if NODE_MODULE_VERSION > NODE_0_12_MODULE_VERSION +static +void PropertyGetterCallbackWrapper( + v8::Local property + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertyGetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertyGetterIndex) + .As()->Value())); + callback(property.As(), cbinfo); +} + +typedef void (*NativePropertyGetter) + (v8::Local, const v8::PropertyCallbackInfo &); + +static +void PropertySetterCallbackWrapper( + v8::Local property + , v8::Local value + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertySetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertySetterIndex) + .As()->Value())); + callback(property.As(), value, cbinfo); +} + +typedef void (*NativePropertySetter)( + v8::Local + , v8::Local + , const v8::PropertyCallbackInfo &); + +static +void PropertyEnumeratorCallbackWrapper( + const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertyEnumeratorCallback callback = + reinterpret_cast(reinterpret_cast( + obj->GetInternalField(kPropertyEnumeratorIndex) + .As()->Value())); + callback(cbinfo); +} + +typedef void (*NativePropertyEnumerator) + (const v8::PropertyCallbackInfo &); + +static +void PropertyDeleterCallbackWrapper( + v8::Local property + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertyDeleterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertyDeleterIndex) + .As()->Value())); + callback(property.As(), cbinfo); +} + +typedef void (NativePropertyDeleter) + (v8::Local, const v8::PropertyCallbackInfo &); + +static +void PropertyQueryCallbackWrapper( + v8::Local property + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertyQueryCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertyQueryIndex) + .As()->Value())); + 
callback(property.As(), cbinfo); +} + +typedef void (*NativePropertyQuery) + (v8::Local, const v8::PropertyCallbackInfo &); +#else +static +void PropertyGetterCallbackWrapper( + v8::Local property + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertyGetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertyGetterIndex) + .As()->Value())); + callback(property, cbinfo); +} + +typedef void (*NativePropertyGetter) + (v8::Local, const v8::PropertyCallbackInfo &); + +static +void PropertySetterCallbackWrapper( + v8::Local property + , v8::Local value + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertySetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertySetterIndex) + .As()->Value())); + callback(property, value, cbinfo); +} + +typedef void (*NativePropertySetter)( + v8::Local + , v8::Local + , const v8::PropertyCallbackInfo &); + +static +void PropertyEnumeratorCallbackWrapper( + const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertyEnumeratorCallback callback = + reinterpret_cast(reinterpret_cast( + obj->GetInternalField(kPropertyEnumeratorIndex) + .As()->Value())); + callback(cbinfo); +} + +typedef void (*NativePropertyEnumerator) + (const v8::PropertyCallbackInfo &); + +static +void PropertyDeleterCallbackWrapper( + v8::Local property + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertyDeleterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertyDeleterIndex) + .As()->Value())); + callback(property, cbinfo); +} + +typedef void (NativePropertyDeleter) + (v8::Local, const v8::PropertyCallbackInfo &); + +static +void PropertyQueryCallbackWrapper( + v8::Local property + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertyQueryCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertyQueryIndex) + .As()->Value())); + callback(property, cbinfo); +} + +typedef void (*NativePropertyQuery) + (v8::Local, const v8::PropertyCallbackInfo &); +#endif + +static +void IndexGetterCallbackWrapper( + uint32_t index, const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + IndexGetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kIndexPropertyGetterIndex) + .As()->Value())); + callback(index, cbinfo); +} + +typedef void (*NativeIndexGetter) + (uint32_t, const v8::PropertyCallbackInfo &); + +static +void IndexSetterCallbackWrapper( + uint32_t index + , v8::Local value + , const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + IndexSetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kIndexPropertySetterIndex) + .As()->Value())); + callback(index, value, cbinfo); +} + +typedef void (*NativeIndexSetter)( + uint32_t + , v8::Local + , const v8::PropertyCallbackInfo 
&); + +static +void IndexEnumeratorCallbackWrapper( + const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + IndexEnumeratorCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField( + kIndexPropertyEnumeratorIndex).As()->Value())); + callback(cbinfo); +} + +typedef void (*NativeIndexEnumerator) + (const v8::PropertyCallbackInfo &); + +static +void IndexDeleterCallbackWrapper( + uint32_t index, const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + IndexDeleterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kIndexPropertyDeleterIndex) + .As()->Value())); + callback(index, cbinfo); +} + +typedef void (*NativeIndexDeleter) + (uint32_t, const v8::PropertyCallbackInfo &); + +static +void IndexQueryCallbackWrapper( + uint32_t index, const v8::PropertyCallbackInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + IndexQueryCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kIndexPropertyQueryIndex) + .As()->Value())); + callback(index, cbinfo); +} + +typedef void (*NativeIndexQuery) + (uint32_t, const v8::PropertyCallbackInfo &); +} // end of namespace imp + +#endif // NAN_CALLBACKS_12_INL_H_ diff --git a/node_modules/nan/nan_callbacks_pre_12_inl.h b/node_modules/nan/nan_callbacks_pre_12_inl.h new file mode 100644 index 00000000..c9ba4993 --- /dev/null +++ b/node_modules/nan/nan_callbacks_pre_12_inl.h @@ -0,0 +1,520 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_CALLBACKS_PRE_12_INL_H_ +#define NAN_CALLBACKS_PRE_12_INL_H_ + +namespace imp { +template class ReturnValueImp; +} // end of namespace imp + +template +class ReturnValue { + v8::Isolate *isolate_; + v8::Persistent *value_; + friend class imp::ReturnValueImp; + + public: + template + explicit inline ReturnValue(v8::Isolate *isolate, v8::Persistent *p) : + isolate_(isolate), value_(p) {} + template + explicit inline ReturnValue(const ReturnValue& that) + : isolate_(that.isolate_), value_(that.value_) { + TYPE_CHECK(T, S); + } + + // Handle setters + template inline void Set(const v8::Local &handle) { + TYPE_CHECK(T, S); + value_->Dispose(); + *value_ = v8::Persistent::New(handle); + } + + template inline void Set(const Global &handle) { + TYPE_CHECK(T, S); + value_->Dispose(); + *value_ = v8::Persistent::New(handle.persistent); + const_cast &>(handle).Reset(); + } + + // Fast primitive setters + inline void Set(bool value) { + v8::HandleScope scope; + + TYPE_CHECK(T, v8::Boolean); + value_->Dispose(); + *value_ = v8::Persistent::New(v8::Boolean::New(value)); + } + + inline void Set(double i) { + v8::HandleScope scope; + + TYPE_CHECK(T, v8::Number); + value_->Dispose(); + *value_ = v8::Persistent::New(v8::Number::New(i)); + } + + inline void Set(int32_t i) { + v8::HandleScope scope; + + TYPE_CHECK(T, v8::Integer); + value_->Dispose(); + *value_ = v8::Persistent::New(v8::Int32::New(i)); + } + + inline void Set(uint32_t i) { + v8::HandleScope scope; + + TYPE_CHECK(T, v8::Integer); + value_->Dispose(); + *value_ = v8::Persistent::New(v8::Uint32::NewFromUnsigned(i)); + } + + // 
Fast JS primitive setters + inline void SetNull() { + v8::HandleScope scope; + + TYPE_CHECK(T, v8::Primitive); + value_->Dispose(); + *value_ = v8::Persistent::New(v8::Null()); + } + + inline void SetUndefined() { + v8::HandleScope scope; + + TYPE_CHECK(T, v8::Primitive); + value_->Dispose(); + *value_ = v8::Persistent::New(v8::Undefined()); + } + + inline void SetEmptyString() { + v8::HandleScope scope; + + TYPE_CHECK(T, v8::String); + value_->Dispose(); + *value_ = v8::Persistent::New(v8::String::Empty()); + } + + // Convenience getter for isolate + inline v8::Isolate *GetIsolate() const { + return isolate_; + } + + // Pointer setter: Uncompilable to prevent inadvertent misuse. + template + inline void Set(S *whatever) { TYPE_CHECK(S*, v8::Primitive); } +}; + +template +class FunctionCallbackInfo { + const v8::Arguments &args_; + v8::Local data_; + ReturnValue return_value_; + v8::Persistent retval_; + + public: + explicit inline FunctionCallbackInfo( + const v8::Arguments &args + , v8::Local data) : + args_(args) + , data_(data) + , return_value_(args.GetIsolate(), &retval_) + , retval_(v8::Persistent::New(v8::Undefined())) {} + + inline ~FunctionCallbackInfo() { + retval_.Dispose(); + retval_.Clear(); + } + + inline ReturnValue GetReturnValue() const { + return ReturnValue(return_value_); + } + + inline v8::Local Callee() const { return args_.Callee(); } + inline v8::Local Data() const { return data_; } + inline v8::Local Holder() const { return args_.Holder(); } + inline bool IsConstructCall() const { return args_.IsConstructCall(); } + inline int Length() const { return args_.Length(); } + inline v8::Local operator[](int i) const { return args_[i]; } + inline v8::Local This() const { return args_.This(); } + inline v8::Isolate *GetIsolate() const { return args_.GetIsolate(); } + + + protected: + static const int kHolderIndex = 0; + static const int kIsolateIndex = 1; + static const int kReturnValueDefaultValueIndex = 2; + static const int kReturnValueIndex = 3; + static const int kDataIndex = 4; + static const int kCalleeIndex = 5; + static const int kContextSaveIndex = 6; + static const int kArgsLength = 7; + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(FunctionCallbackInfo) +}; + +template +class PropertyCallbackInfoBase { + const v8::AccessorInfo &info_; + const v8::Local data_; + + public: + explicit inline PropertyCallbackInfoBase( + const v8::AccessorInfo &info + , const v8::Local data) : + info_(info) + , data_(data) {} + + inline v8::Isolate* GetIsolate() const { return info_.GetIsolate(); } + inline v8::Local Data() const { return data_; } + inline v8::Local This() const { return info_.This(); } + inline v8::Local Holder() const { return info_.Holder(); } + + protected: + static const int kHolderIndex = 0; + static const int kIsolateIndex = 1; + static const int kReturnValueDefaultValueIndex = 2; + static const int kReturnValueIndex = 3; + static const int kDataIndex = 4; + static const int kThisIndex = 5; + static const int kArgsLength = 6; + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(PropertyCallbackInfoBase) +}; + +template +class PropertyCallbackInfo : public PropertyCallbackInfoBase { + ReturnValue return_value_; + v8::Persistent retval_; + + public: + explicit inline PropertyCallbackInfo( + const v8::AccessorInfo &info + , const v8::Local data) : + PropertyCallbackInfoBase(info, data) + , return_value_(info.GetIsolate(), &retval_) + , retval_(v8::Persistent::New(v8::Undefined())) {} + + inline ~PropertyCallbackInfo() { + retval_.Dispose(); + retval_.Clear(); + } + + 
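  // (Descriptive note, not in the upstream header: pre-0.12 V8 has no
  // v8::ReturnValue, so whatever the user sets through GetReturnValue() below
  // is parked in the retval_ Persistent; the imp::*CallbackWrapper functions
  // read it back afterwards via imp::ReturnValueImp<T>::Value() and hand it to
  // V8 as the callback's v8::Handle result.)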
inline ReturnValue GetReturnValue() const { return return_value_; } +}; + +template<> +class PropertyCallbackInfo : + public PropertyCallbackInfoBase { + ReturnValue return_value_; + v8::Persistent retval_; + + public: + explicit inline PropertyCallbackInfo( + const v8::AccessorInfo &info + , const v8::Local data) : + PropertyCallbackInfoBase(info, data) + , return_value_(info.GetIsolate(), &retval_) + , retval_(v8::Persistent::New(v8::Local())) {} + + inline ~PropertyCallbackInfo() { + retval_.Dispose(); + retval_.Clear(); + } + + inline ReturnValue GetReturnValue() const { + return return_value_; + } +}; + +template<> +class PropertyCallbackInfo : + public PropertyCallbackInfoBase { + ReturnValue return_value_; + v8::Persistent retval_; + + public: + explicit inline PropertyCallbackInfo( + const v8::AccessorInfo &info + , const v8::Local data) : + PropertyCallbackInfoBase(info, data) + , return_value_(info.GetIsolate(), &retval_) + , retval_(v8::Persistent::New(v8::Local())) {} + + inline ~PropertyCallbackInfo() { + retval_.Dispose(); + retval_.Clear(); + } + + inline ReturnValue GetReturnValue() const { + return return_value_; + } +}; + +template<> +class PropertyCallbackInfo : + public PropertyCallbackInfoBase { + ReturnValue return_value_; + v8::Persistent retval_; + + public: + explicit inline PropertyCallbackInfo( + const v8::AccessorInfo &info + , const v8::Local data) : + PropertyCallbackInfoBase(info, data) + , return_value_(info.GetIsolate(), &retval_) + , retval_(v8::Persistent::New(v8::Local())) {} + + inline ~PropertyCallbackInfo() { + retval_.Dispose(); + retval_.Clear(); + } + + inline ReturnValue GetReturnValue() const { + return return_value_; + } +}; + +namespace imp { +template +class ReturnValueImp : public ReturnValue { + public: + explicit ReturnValueImp(ReturnValue that) : + ReturnValue(that) {} + inline v8::Handle Value() { + return *ReturnValue::value_; + } +}; + +static +v8::Handle FunctionCallbackWrapper(const v8::Arguments &args) { + v8::Local obj = args.Data().As(); + FunctionCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kFunctionIndex).As()->Value())); + FunctionCallbackInfo + cbinfo(args, obj->GetInternalField(kDataIndex)); + callback(cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (*NativeFunction)(const v8::Arguments &); + +static +v8::Handle GetterCallbackWrapper( + v8::Local property, const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + GetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kGetterIndex).As()->Value())); + callback(property, cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (*NativeGetter) + (v8::Local, const v8::AccessorInfo &); + +static +void SetterCallbackWrapper( + v8::Local property + , v8::Local value + , const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + SetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kSetterIndex).As()->Value())); + callback(property, value, cbinfo); +} + +typedef void (*NativeSetter) + (v8::Local, v8::Local, const v8::AccessorInfo &); + +static +v8::Handle PropertyGetterCallbackWrapper( + v8::Local property, const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, 
obj->GetInternalField(kDataIndex)); + PropertyGetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertyGetterIndex) + .As()->Value())); + callback(property, cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (*NativePropertyGetter) + (v8::Local, const v8::AccessorInfo &); + +static +v8::Handle PropertySetterCallbackWrapper( + v8::Local property + , v8::Local value + , const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertySetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertySetterIndex) + .As()->Value())); + callback(property, value, cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (*NativePropertySetter) + (v8::Local, v8::Local, const v8::AccessorInfo &); + +static +v8::Handle PropertyEnumeratorCallbackWrapper( + const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertyEnumeratorCallback callback = + reinterpret_cast(reinterpret_cast( + obj->GetInternalField(kPropertyEnumeratorIndex) + .As()->Value())); + callback(cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (*NativePropertyEnumerator) + (const v8::AccessorInfo &); + +static +v8::Handle PropertyDeleterCallbackWrapper( + v8::Local property + , const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertyDeleterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertyDeleterIndex) + .As()->Value())); + callback(property, cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (NativePropertyDeleter) + (v8::Local, const v8::AccessorInfo &); + +static +v8::Handle PropertyQueryCallbackWrapper( + v8::Local property, const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + PropertyQueryCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kPropertyQueryIndex) + .As()->Value())); + callback(property, cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (*NativePropertyQuery) + (v8::Local, const v8::AccessorInfo &); + +static +v8::Handle IndexGetterCallbackWrapper( + uint32_t index, const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + IndexGetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kIndexPropertyGetterIndex) + .As()->Value())); + callback(index, cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (*NativeIndexGetter) + (uint32_t, const v8::AccessorInfo &); + +static +v8::Handle IndexSetterCallbackWrapper( + uint32_t index + , v8::Local value + , const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + IndexSetterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kIndexPropertySetterIndex) + .As()->Value())); + callback(index, value, cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (*NativeIndexSetter) + 
(uint32_t, v8::Local, const v8::AccessorInfo &); + +static +v8::Handle IndexEnumeratorCallbackWrapper( + const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + IndexEnumeratorCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kIndexPropertyEnumeratorIndex) + .As()->Value())); + callback(cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (*NativeIndexEnumerator) + (const v8::AccessorInfo &); + +static +v8::Handle IndexDeleterCallbackWrapper( + uint32_t index, const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + IndexDeleterCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kIndexPropertyDeleterIndex) + .As()->Value())); + callback(index, cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (*NativeIndexDeleter) + (uint32_t, const v8::AccessorInfo &); + +static +v8::Handle IndexQueryCallbackWrapper( + uint32_t index, const v8::AccessorInfo &info) { + v8::Local obj = info.Data().As(); + PropertyCallbackInfo + cbinfo(info, obj->GetInternalField(kDataIndex)); + IndexQueryCallback callback = reinterpret_cast( + reinterpret_cast( + obj->GetInternalField(kIndexPropertyQueryIndex) + .As()->Value())); + callback(index, cbinfo); + return ReturnValueImp(cbinfo.GetReturnValue()).Value(); +} + +typedef v8::Handle (*NativeIndexQuery) + (uint32_t, const v8::AccessorInfo &); +} // end of namespace imp + +#endif // NAN_CALLBACKS_PRE_12_INL_H_ diff --git a/node_modules/nan/nan_converters.h b/node_modules/nan/nan_converters.h new file mode 100644 index 00000000..c0b32729 --- /dev/null +++ b/node_modules/nan/nan_converters.h @@ -0,0 +1,72 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_CONVERTERS_H_ +#define NAN_CONVERTERS_H_ + +namespace imp { +template struct ToFactoryBase { + typedef MaybeLocal return_t; +}; +template struct ValueFactoryBase { typedef Maybe return_t; }; + +template struct ToFactory; + +template<> +struct ToFactory : ToFactoryBase { + static inline return_t convert(v8::Local val) { + if (val.IsEmpty() || !val->IsFunction()) return MaybeLocal(); + return MaybeLocal(val.As()); + } +}; + +#define X(TYPE) \ + template<> \ + struct ToFactory : ToFactoryBase { \ + static inline return_t convert(v8::Local val); \ + }; + +X(Boolean) +X(Number) +X(String) +X(Object) +X(Integer) +X(Uint32) +X(Int32) + +#undef X + +#define X(TYPE) \ + template<> \ + struct ToFactory : ValueFactoryBase { \ + static inline return_t convert(v8::Local val); \ + }; + +X(bool) +X(double) +X(int64_t) +X(uint32_t) +X(int32_t) + +#undef X +} // end of namespace imp + +template +inline +typename imp::ToFactory::return_t To(v8::Local val) { + return imp::ToFactory::convert(val); +} + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) +# include "nan_converters_43_inl.h" +#else +# include "nan_converters_pre_43_inl.h" +#endif + +#endif // NAN_CONVERTERS_H_ diff --git a/node_modules/nan/nan_converters_43_inl.h b/node_modules/nan/nan_converters_43_inl.h new file mode 100644 index 00000000..41b72deb --- /dev/null 
+++ b/node_modules/nan/nan_converters_43_inl.h @@ -0,0 +1,68 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_CONVERTERS_43_INL_H_ +#define NAN_CONVERTERS_43_INL_H_ + +#define X(TYPE) \ +imp::ToFactory::return_t \ +imp::ToFactory::convert(v8::Local val) { \ + v8::Isolate *isolate = v8::Isolate::GetCurrent(); \ + v8::EscapableHandleScope scope(isolate); \ + return scope.Escape( \ + val->To ## TYPE(isolate->GetCurrentContext()) \ + .FromMaybe(v8::Local())); \ +} + +X(Number) +X(String) +X(Object) +X(Integer) +X(Uint32) +X(Int32) +// V8 <= 7.0 +#if V8_MAJOR_VERSION < 7 || (V8_MAJOR_VERSION == 7 && V8_MINOR_VERSION == 0) +X(Boolean) +#else +imp::ToFactory::return_t \ +imp::ToFactory::convert(v8::Local val) { \ + v8::Isolate *isolate = v8::Isolate::GetCurrent(); \ + v8::EscapableHandleScope scope(isolate); \ + return scope.Escape(val->ToBoolean(isolate)); \ +} +#endif + +#undef X + +#define X(TYPE, NAME) \ +imp::ToFactory::return_t \ +imp::ToFactory::convert(v8::Local val) { \ + v8::Isolate *isolate = v8::Isolate::GetCurrent(); \ + v8::HandleScope scope(isolate); \ + return val->NAME ## Value(isolate->GetCurrentContext()); \ +} + +X(double, Number) +X(int64_t, Integer) +X(uint32_t, Uint32) +X(int32_t, Int32) +// V8 <= 7.0 +#if V8_MAJOR_VERSION < 7 || (V8_MAJOR_VERSION == 7 && V8_MINOR_VERSION == 0) +X(bool, Boolean) +#else +imp::ToFactory::return_t \ +imp::ToFactory::convert(v8::Local val) { \ + v8::Isolate *isolate = v8::Isolate::GetCurrent(); \ + v8::HandleScope scope(isolate); \ + return Just(val->BooleanValue(isolate)); \ +} +#endif + +#undef X + +#endif // NAN_CONVERTERS_43_INL_H_ diff --git a/node_modules/nan/nan_converters_pre_43_inl.h b/node_modules/nan/nan_converters_pre_43_inl.h new file mode 100644 index 00000000..ae0518aa --- /dev/null +++ b/node_modules/nan/nan_converters_pre_43_inl.h @@ -0,0 +1,42 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_CONVERTERS_PRE_43_INL_H_ +#define NAN_CONVERTERS_PRE_43_INL_H_ + +#define X(TYPE) \ +imp::ToFactory::return_t \ +imp::ToFactory::convert(v8::Local val) { \ + return val->To ## TYPE(); \ +} + +X(Boolean) +X(Number) +X(String) +X(Object) +X(Integer) +X(Uint32) +X(Int32) + +#undef X + +#define X(TYPE, NAME) \ +imp::ToFactory::return_t \ +imp::ToFactory::convert(v8::Local val) { \ + return Just(val->NAME ## Value()); \ +} + +X(bool, Boolean) +X(double, Number) +X(int64_t, Integer) +X(uint32_t, Uint32) +X(int32_t, Int32) + +#undef X + +#endif // NAN_CONVERTERS_PRE_43_INL_H_ diff --git a/node_modules/nan/nan_define_own_property_helper.h b/node_modules/nan/nan_define_own_property_helper.h new file mode 100644 index 00000000..d710ef22 --- /dev/null +++ b/node_modules/nan/nan_define_own_property_helper.h @@ -0,0 +1,29 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_DEFINE_OWN_PROPERTY_HELPER_H_ +#define NAN_DEFINE_OWN_PROPERTY_HELPER_H_ + +namespace imp { + +inline Maybe DefineOwnPropertyHelper( + 
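Editorial aside: the signature interrupted at this point belongs to imp::DefineOwnPropertyHelper and continues on the next line. Separately, the Nan::To<> converters declared in nan_converters.h above are used roughly as sketched below; the function and variable names are illustrative and not part of this vendored patch.

#include <nan.h>

// Nan::To<> yields MaybeLocal<T> for handle targets and Maybe<T> for plain C++
// targets, so conversions that may throw must be unwrapped explicitly.
void Convert(v8::Local<v8::Value> val) {
  Nan::MaybeLocal<v8::String> s = Nan::To<v8::String>(val);
  Nan::Maybe<int32_t> n = Nan::To<int32_t>(val);
  if (!s.IsEmpty() && n.IsJust()) {
    v8::Local<v8::String> str = s.ToLocalChecked();
    int32_t i = n.FromJust();
    (void) str; (void) i;               // use the converted values here
  }
}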
v8::PropertyAttribute current + , v8::Handle obj + , v8::Handle key + , v8::Handle value + , v8::PropertyAttribute attribs = v8::None) { + return !(current & v8::DontDelete) || // configurable OR + (!(current & v8::ReadOnly) && // writable AND + !((attribs ^ current) & ~v8::ReadOnly)) // same excluding RO + ? Just(obj->ForceSet(key, value, attribs)) + : Nothing(); +} + +} // end of namespace imp + +#endif // NAN_DEFINE_OWN_PROPERTY_HELPER_H_ diff --git a/node_modules/nan/nan_implementation_12_inl.h b/node_modules/nan/nan_implementation_12_inl.h new file mode 100644 index 00000000..255293ac --- /dev/null +++ b/node_modules/nan/nan_implementation_12_inl.h @@ -0,0 +1,430 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_IMPLEMENTATION_12_INL_H_ +#define NAN_IMPLEMENTATION_12_INL_H_ +//============================================================================== +// node v0.11 implementation +//============================================================================== + +namespace imp { + +//=== Array ==================================================================== + +Factory::return_t +Factory::New() { + return v8::Array::New(v8::Isolate::GetCurrent()); +} + +Factory::return_t +Factory::New(int length) { + return v8::Array::New(v8::Isolate::GetCurrent(), length); +} + +//=== Boolean ================================================================== + +Factory::return_t +Factory::New(bool value) { + return v8::Boolean::New(v8::Isolate::GetCurrent(), value); +} + +//=== Boolean Object =========================================================== + +Factory::return_t +Factory::New(bool value) { +#if (NODE_MODULE_VERSION >= NODE_6_0_MODULE_VERSION) + return v8::BooleanObject::New( + v8::Isolate::GetCurrent(), value).As(); +#else + return v8::BooleanObject::New(value).As(); +#endif +} + +//=== Context ================================================================== + +Factory::return_t +Factory::New( v8::ExtensionConfiguration* extensions + , v8::Local tmpl + , v8::Local obj) { + return v8::Context::New(v8::Isolate::GetCurrent(), extensions, tmpl, obj); +} + +//=== Date ===================================================================== + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) +Factory::return_t +Factory::New(double value) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(v8::Date::New(isolate->GetCurrentContext(), value) + .FromMaybe(v8::Local()).As()); +} +#else +Factory::return_t +Factory::New(double value) { + return v8::Date::New(v8::Isolate::GetCurrent(), value).As(); +} +#endif + +//=== External ================================================================= + +Factory::return_t +Factory::New(void * value) { + return v8::External::New(v8::Isolate::GetCurrent(), value); +} + +//=== Function ================================================================= + +Factory::return_t +Factory::New( FunctionCallback callback + , v8::Local data) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + v8::Local tpl = v8::ObjectTemplate::New(isolate); + tpl->SetInternalFieldCount(imp::kFunctionFieldCount); + v8::Local obj = NewInstance(tpl).ToLocalChecked(); + + 
obj->SetInternalField( + imp::kFunctionIndex + , v8::External::New(isolate, reinterpret_cast(callback))); + + v8::Local val = v8::Local::New(isolate, data); + + if (!val.IsEmpty()) { + obj->SetInternalField(imp::kDataIndex, val); + } + +#if NODE_MAJOR_VERSION >= 10 + v8::Local context = isolate->GetCurrentContext(); + v8::Local function = + v8::Function::New(context, imp::FunctionCallbackWrapper, obj) + .ToLocalChecked(); +#else + v8::Local function = + v8::Function::New(isolate, imp::FunctionCallbackWrapper, obj); +#endif + + return scope.Escape(function); +} + +//=== Function Template ======================================================== + +Factory::return_t +Factory::New( FunctionCallback callback + , v8::Local data + , v8::Local signature) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + if (callback) { + v8::EscapableHandleScope scope(isolate); + v8::Local tpl = v8::ObjectTemplate::New(isolate); + tpl->SetInternalFieldCount(imp::kFunctionFieldCount); + v8::Local obj = NewInstance(tpl).ToLocalChecked(); + + obj->SetInternalField( + imp::kFunctionIndex + , v8::External::New(isolate, reinterpret_cast(callback))); + v8::Local val = v8::Local::New(isolate, data); + + if (!val.IsEmpty()) { + obj->SetInternalField(imp::kDataIndex, val); + } + + return scope.Escape(v8::FunctionTemplate::New( isolate + , imp::FunctionCallbackWrapper + , obj + , signature)); + } else { + return v8::FunctionTemplate::New(isolate, 0, data, signature); + } +} + +//=== Number =================================================================== + +Factory::return_t +Factory::New(double value) { + return v8::Number::New(v8::Isolate::GetCurrent(), value); +} + +//=== Number Object ============================================================ + +Factory::return_t +Factory::New(double value) { + return v8::NumberObject::New( v8::Isolate::GetCurrent() + , value).As(); +} + +//=== Integer, Int32 and Uint32 ================================================ + +template +typename IntegerFactory::return_t +IntegerFactory::New(int32_t value) { + return To(T::New(v8::Isolate::GetCurrent(), value)); +} + +template +typename IntegerFactory::return_t +IntegerFactory::New(uint32_t value) { + return To(T::NewFromUnsigned(v8::Isolate::GetCurrent(), value)); +} + +Factory::return_t +Factory::New(int32_t value) { + return To( + v8::Uint32::NewFromUnsigned(v8::Isolate::GetCurrent(), value)); +} + +Factory::return_t +Factory::New(uint32_t value) { + return To( + v8::Uint32::NewFromUnsigned(v8::Isolate::GetCurrent(), value)); +} + +//=== Object =================================================================== + +Factory::return_t +Factory::New() { + return v8::Object::New(v8::Isolate::GetCurrent()); +} + +//=== Object Template ========================================================== + +Factory::return_t +Factory::New() { + return v8::ObjectTemplate::New(v8::Isolate::GetCurrent()); +} + +//=== RegExp =================================================================== + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) +Factory::return_t +Factory::New( + v8::Local pattern + , v8::RegExp::Flags flags) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape( + v8::RegExp::New(isolate->GetCurrentContext(), pattern, flags) + .FromMaybe(v8::Local())); +} +#else +Factory::return_t +Factory::New( + v8::Local pattern + , v8::RegExp::Flags flags) { + return v8::RegExp::New(pattern, 
flags); +} +#endif + +//=== Script =================================================================== + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) +Factory::return_t +Factory::New( v8::Local source) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + v8::ScriptCompiler::Source src(source); + return scope.Escape( + v8::ScriptCompiler::Compile(isolate->GetCurrentContext(), &src) + .FromMaybe(v8::Local())); +} + +Factory::return_t +Factory::New( v8::Local source + , v8::ScriptOrigin const& origin) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + v8::ScriptCompiler::Source src(source, origin); + return scope.Escape( + v8::ScriptCompiler::Compile(isolate->GetCurrentContext(), &src) + .FromMaybe(v8::Local())); +} +#else +Factory::return_t +Factory::New( v8::Local source) { + v8::ScriptCompiler::Source src(source); + return v8::ScriptCompiler::Compile(v8::Isolate::GetCurrent(), &src); +} + +Factory::return_t +Factory::New( v8::Local source + , v8::ScriptOrigin const& origin) { + v8::ScriptCompiler::Source src(source, origin); + return v8::ScriptCompiler::Compile(v8::Isolate::GetCurrent(), &src); +} +#endif + +//=== Signature ================================================================ + +Factory::return_t +Factory::New(Factory::FTH receiver) { + return v8::Signature::New(v8::Isolate::GetCurrent(), receiver); +} + +//=== String =================================================================== + +Factory::return_t +Factory::New() { + return v8::String::Empty(v8::Isolate::GetCurrent()); +} + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) +Factory::return_t +Factory::New(const char * value, int length) { + return v8::String::NewFromUtf8( + v8::Isolate::GetCurrent(), value, v8::NewStringType::kNormal, length); +} + +Factory::return_t +Factory::New(std::string const& value) { + assert(value.size() <= INT_MAX && "string too long"); + return v8::String::NewFromUtf8(v8::Isolate::GetCurrent(), + value.data(), v8::NewStringType::kNormal, static_cast(value.size())); +} + +Factory::return_t +Factory::New(const uint16_t * value, int length) { + return v8::String::NewFromTwoByte(v8::Isolate::GetCurrent(), value, + v8::NewStringType::kNormal, length); +} + +Factory::return_t +Factory::New(v8::String::ExternalStringResource * value) { + return v8::String::NewExternalTwoByte(v8::Isolate::GetCurrent(), value); +} + +Factory::return_t +Factory::New(ExternalOneByteStringResource * value) { + return v8::String::NewExternalOneByte(v8::Isolate::GetCurrent(), value); +} +#else +Factory::return_t +Factory::New(const char * value, int length) { + return v8::String::NewFromUtf8(v8::Isolate::GetCurrent(), value, + v8::String::kNormalString, length); +} + +Factory::return_t +Factory::New( + std::string const& value) /* NOLINT(build/include_what_you_use) */ { + assert(value.size() <= INT_MAX && "string too long"); + return v8::String::NewFromUtf8(v8::Isolate::GetCurrent(), value.data(), + v8::String::kNormalString, + static_cast(value.size())); +} + +Factory::return_t +Factory::New(const uint16_t * value, int length) { + return v8::String::NewFromTwoByte(v8::Isolate::GetCurrent(), value, + v8::String::kNormalString, length); +} + +Factory::return_t +Factory::New(v8::String::ExternalStringResource * value) { + return 
v8::String::NewExternal(v8::Isolate::GetCurrent(), value); +} + +Factory::return_t +Factory::New(ExternalOneByteStringResource * value) { + return v8::String::NewExternal(v8::Isolate::GetCurrent(), value); +} +#endif + +//=== String Object ============================================================ + +// See https://github.com/nodejs/nan/pull/811#discussion_r224594980. +// Disable the warning as there is no way around it. +// TODO(bnoordhuis) Use isolate-based version in Node.js v12. +Factory::return_t +Factory::New(v8::Local value) { +// V8 > 7.0 +#if V8_MAJOR_VERSION > 7 || (V8_MAJOR_VERSION == 7 && V8_MINOR_VERSION > 0) + return v8::StringObject::New(v8::Isolate::GetCurrent(), value) + .As(); +#else +#ifdef _MSC_VER +#pragma warning(push) +#pragma warning(disable : 4996) +#endif +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wdeprecated-declarations" +#endif + return v8::StringObject::New(value).As(); +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif +#ifdef _MSC_VER +#pragma warning(pop) +#endif +#endif +} + +//=== Unbound Script =========================================================== + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) +Factory::return_t +Factory::New(v8::Local source) { + v8::ScriptCompiler::Source src(source); + return v8::ScriptCompiler::CompileUnboundScript( + v8::Isolate::GetCurrent(), &src); +} + +Factory::return_t +Factory::New( v8::Local source + , v8::ScriptOrigin const& origin) { + v8::ScriptCompiler::Source src(source, origin); + return v8::ScriptCompiler::CompileUnboundScript( + v8::Isolate::GetCurrent(), &src); +} +#else +Factory::return_t +Factory::New(v8::Local source) { + v8::ScriptCompiler::Source src(source); + return v8::ScriptCompiler::CompileUnbound(v8::Isolate::GetCurrent(), &src); +} + +Factory::return_t +Factory::New( v8::Local source + , v8::ScriptOrigin const& origin) { + v8::ScriptCompiler::Source src(source, origin); + return v8::ScriptCompiler::CompileUnbound(v8::Isolate::GetCurrent(), &src); +} +#endif + +} // end of namespace imp + +//=== Presistents and Handles ================================================== + +#if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION +template +inline v8::Local New(v8::Handle h) { + return v8::Local::New(v8::Isolate::GetCurrent(), h); +} +#endif + +template +inline v8::Local New(v8::Persistent const& p) { + return v8::Local::New(v8::Isolate::GetCurrent(), p); +} + +template +inline v8::Local New(Persistent const& p) { + return v8::Local::New(v8::Isolate::GetCurrent(), p); +} + +template +inline v8::Local New(Global const& p) { + return v8::Local::New(v8::Isolate::GetCurrent(), p); +} + +#endif // NAN_IMPLEMENTATION_12_INL_H_ diff --git a/node_modules/nan/nan_implementation_pre_12_inl.h b/node_modules/nan/nan_implementation_pre_12_inl.h new file mode 100644 index 00000000..1472421a --- /dev/null +++ b/node_modules/nan/nan_implementation_pre_12_inl.h @@ -0,0 +1,263 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_IMPLEMENTATION_PRE_12_INL_H_ +#define NAN_IMPLEMENTATION_PRE_12_INL_H_ + +//============================================================================== +// node v0.10 implementation 
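// Usage sketch for the Factory implementations above (illustrative, not part
// of the vendored nan sources): callers go through Nan::New<T>(), and the
// string factory returns a MaybeLocal that must be unwrapped explicitly, e.g.
//
//   v8::Local<v8::Object> obj = Nan::New<v8::Object>();
//   Nan::Set(obj,
//            Nan::New<v8::String>("answer").ToLocalChecked(),
//            Nan::New<v8::Number>(42));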
+//============================================================================== + +namespace imp { + +//=== Array ==================================================================== + +Factory::return_t +Factory::New() { + return v8::Array::New(); +} + +Factory::return_t +Factory::New(int length) { + return v8::Array::New(length); +} + +//=== Boolean ================================================================== + +Factory::return_t +Factory::New(bool value) { + return v8::Boolean::New(value)->ToBoolean(); +} + +//=== Boolean Object =========================================================== + +Factory::return_t +Factory::New(bool value) { + return v8::BooleanObject::New(value).As(); +} + +//=== Context ================================================================== + +Factory::return_t +Factory::New( v8::ExtensionConfiguration* extensions + , v8::Local tmpl + , v8::Local obj) { + v8::Persistent ctx = v8::Context::New(extensions, tmpl, obj); + v8::Local lctx = v8::Local::New(ctx); + ctx.Dispose(); + return lctx; +} + +//=== Date ===================================================================== + +Factory::return_t +Factory::New(double value) { + return v8::Date::New(value).As(); +} + +//=== External ================================================================= + +Factory::return_t +Factory::New(void * value) { + return v8::External::New(value); +} + +//=== Function ================================================================= + +Factory::return_t +Factory::New( FunctionCallback callback + , v8::Local data) { + v8::HandleScope scope; + + return scope.Close(Factory::New( + callback, data, v8::Local()) + ->GetFunction()); +} + + +//=== FunctionTemplate ========================================================= + +Factory::return_t +Factory::New( FunctionCallback callback + , v8::Local data + , v8::Local signature) { + if (callback) { + v8::HandleScope scope; + + v8::Local tpl = v8::ObjectTemplate::New(); + tpl->SetInternalFieldCount(imp::kFunctionFieldCount); + v8::Local obj = tpl->NewInstance(); + + obj->SetInternalField( + imp::kFunctionIndex + , v8::External::New(reinterpret_cast(callback))); + + v8::Local val = v8::Local::New(data); + + if (!val.IsEmpty()) { + obj->SetInternalField(imp::kDataIndex, val); + } + + // Note(agnat): Emulate length argument here. Unfortunately, I couldn't find + // a way. Have at it though... 
+ return scope.Close( + v8::FunctionTemplate::New(imp::FunctionCallbackWrapper + , obj + , signature)); + } else { + return v8::FunctionTemplate::New(0, data, signature); + } +} + +//=== Number =================================================================== + +Factory::return_t +Factory::New(double value) { + return v8::Number::New(value); +} + +//=== Number Object ============================================================ + +Factory::return_t +Factory::New(double value) { + return v8::NumberObject::New(value).As(); +} + +//=== Integer, Int32 and Uint32 ================================================ + +template +typename IntegerFactory::return_t +IntegerFactory::New(int32_t value) { + return To(T::New(value)); +} + +template +typename IntegerFactory::return_t +IntegerFactory::New(uint32_t value) { + return To(T::NewFromUnsigned(value)); +} + +Factory::return_t +Factory::New(int32_t value) { + return To(v8::Uint32::NewFromUnsigned(value)); +} + +Factory::return_t +Factory::New(uint32_t value) { + return To(v8::Uint32::NewFromUnsigned(value)); +} + + +//=== Object =================================================================== + +Factory::return_t +Factory::New() { + return v8::Object::New(); +} + +//=== Object Template ========================================================== + +Factory::return_t +Factory::New() { + return v8::ObjectTemplate::New(); +} + +//=== RegExp =================================================================== + +Factory::return_t +Factory::New( + v8::Local pattern + , v8::RegExp::Flags flags) { + return v8::RegExp::New(pattern, flags); +} + +//=== Script =================================================================== + +Factory::return_t +Factory::New( v8::Local source) { + return v8::Script::New(source); +} +Factory::return_t +Factory::New( v8::Local source + , v8::ScriptOrigin const& origin) { + return v8::Script::New(source, const_cast(&origin)); +} + +//=== Signature ================================================================ + +Factory::return_t +Factory::New(Factory::FTH receiver) { + return v8::Signature::New(receiver); +} + +//=== String =================================================================== + +Factory::return_t +Factory::New() { + return v8::String::Empty(); +} + +Factory::return_t +Factory::New(const char * value, int length) { + return v8::String::New(value, length); +} + +Factory::return_t +Factory::New( + std::string const& value) /* NOLINT(build/include_what_you_use) */ { + assert(value.size() <= INT_MAX && "string too long"); + return v8::String::New(value.data(), static_cast(value.size())); +} + +Factory::return_t +Factory::New(const uint16_t * value, int length) { + return v8::String::New(value, length); +} + +Factory::return_t +Factory::New(v8::String::ExternalStringResource * value) { + return v8::String::NewExternal(value); +} + +Factory::return_t +Factory::New(v8::String::ExternalAsciiStringResource * value) { + return v8::String::NewExternal(value); +} + +//=== String Object ============================================================ + +Factory::return_t +Factory::New(v8::Local value) { + return v8::StringObject::New(value).As(); +} + +} // end of namespace imp + +//=== Presistents and Handles ================================================== + +template +inline v8::Local New(v8::Handle h) { + return v8::Local::New(h); +} + +template +inline v8::Local New(v8::Persistent const& p) { + return v8::Local::New(p); +} + +template +inline v8::Local New(Persistent const& p) { + return v8::Local::New(p.persistent); 
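  // Usage sketch (illustrative, not taken from the nan headers): on every
  // engine line, Nan::New(persistent) is the supported way to re-materialize
  // a stored handle inside the current scope, e.g. (names hypothetical):
  //
  //   Nan::Persistent<v8::Function> cb;            // stored elsewhere
  //   v8::Local<v8::Function> fn = Nan::New(cb);   // safe Local for this scope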
+} + +template +inline v8::Local New(Global const& p) { + return v8::Local::New(p.persistent); +} + +#endif // NAN_IMPLEMENTATION_PRE_12_INL_H_ diff --git a/node_modules/nan/nan_json.h b/node_modules/nan/nan_json.h new file mode 100644 index 00000000..33ac8ba6 --- /dev/null +++ b/node_modules/nan/nan_json.h @@ -0,0 +1,166 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_JSON_H_ +#define NAN_JSON_H_ + +#if NODE_MODULE_VERSION < NODE_0_12_MODULE_VERSION +#define NAN_JSON_H_NEED_PARSE 1 +#else +#define NAN_JSON_H_NEED_PARSE 0 +#endif // NODE_MODULE_VERSION < NODE_0_12_MODULE_VERSION + +#if NODE_MODULE_VERSION >= NODE_7_0_MODULE_VERSION +#define NAN_JSON_H_NEED_STRINGIFY 0 +#else +#define NAN_JSON_H_NEED_STRINGIFY 1 +#endif // NODE_MODULE_VERSION >= NODE_7_0_MODULE_VERSION + +class JSON { + public: + JSON() { +#if NAN_JSON_H_NEED_PARSE + NAN_JSON_H_NEED_STRINGIFY + Nan::HandleScope scope; + + Nan::MaybeLocal maybe_global_json = Nan::Get( + Nan::GetCurrentContext()->Global(), + Nan::New("JSON").ToLocalChecked() + ); + + assert(!maybe_global_json.IsEmpty() && "global JSON is empty"); + v8::Local val_global_json = maybe_global_json.ToLocalChecked(); + + assert(val_global_json->IsObject() && "global JSON is not an object"); + Nan::MaybeLocal maybe_obj_global_json = + Nan::To(val_global_json); + + assert(!maybe_obj_global_json.IsEmpty() && "global JSON object is empty"); + v8::Local global_json = maybe_obj_global_json.ToLocalChecked(); + +#if NAN_JSON_H_NEED_PARSE + Nan::MaybeLocal maybe_parse_method = Nan::Get( + global_json, Nan::New("parse").ToLocalChecked() + ); + + assert(!maybe_parse_method.IsEmpty() && "JSON.parse is empty"); + v8::Local parse_method = maybe_parse_method.ToLocalChecked(); + + assert(parse_method->IsFunction() && "JSON.parse is not a function"); + parse_cb_.Reset(parse_method.As()); +#endif // NAN_JSON_H_NEED_PARSE + +#if NAN_JSON_H_NEED_STRINGIFY + Nan::MaybeLocal maybe_stringify_method = Nan::Get( + global_json, Nan::New("stringify").ToLocalChecked() + ); + + assert(!maybe_stringify_method.IsEmpty() && "JSON.stringify is empty"); + v8::Local stringify_method = + maybe_stringify_method.ToLocalChecked(); + + assert( + stringify_method->IsFunction() && "JSON.stringify is not a function" + ); + stringify_cb_.Reset(stringify_method.As()); +#endif // NAN_JSON_H_NEED_STRINGIFY +#endif // NAN_JSON_H_NEED_PARSE + NAN_JSON_H_NEED_STRINGIFY + } + + inline + Nan::MaybeLocal Parse(v8::Local json_string) { + Nan::EscapableHandleScope scope; +#if NAN_JSON_H_NEED_PARSE + return scope.Escape(parse(json_string)); +#else + Nan::MaybeLocal result; +#if NODE_MODULE_VERSION >= NODE_0_12_MODULE_VERSION && \ + NODE_MODULE_VERSION <= IOJS_2_0_MODULE_VERSION + result = v8::JSON::Parse(json_string); +#else +#if NODE_MODULE_VERSION > NODE_6_0_MODULE_VERSION + v8::Local context_or_isolate = Nan::GetCurrentContext(); +#else + v8::Isolate* context_or_isolate = v8::Isolate::GetCurrent(); +#endif // NODE_MODULE_VERSION > NODE_6_0_MODULE_VERSION + result = v8::JSON::Parse(context_or_isolate, json_string); +#endif // NODE_MODULE_VERSION >= NODE_0_12_MODULE_VERSION && + // NODE_MODULE_VERSION <= IOJS_2_0_MODULE_VERSION + if (result.IsEmpty()) return v8::Local(); + return scope.Escape(result.ToLocalChecked()); +#endif // NAN_JSON_H_NEED_PARSE + } + + inline + Nan::MaybeLocal Stringify(v8::Local 
json_object) { + Nan::EscapableHandleScope scope; + Nan::MaybeLocal result = +#if NAN_JSON_H_NEED_STRINGIFY + Nan::To(stringify(json_object)); +#else + v8::JSON::Stringify(Nan::GetCurrentContext(), json_object); +#endif // NAN_JSON_H_NEED_STRINGIFY + if (result.IsEmpty()) return v8::Local(); + return scope.Escape(result.ToLocalChecked()); + } + + inline + Nan::MaybeLocal Stringify(v8::Local json_object, + v8::Local gap) { + Nan::EscapableHandleScope scope; + Nan::MaybeLocal result = +#if NAN_JSON_H_NEED_STRINGIFY + Nan::To(stringify(json_object, gap)); +#else + v8::JSON::Stringify(Nan::GetCurrentContext(), json_object, gap); +#endif // NAN_JSON_H_NEED_STRINGIFY + if (result.IsEmpty()) return v8::Local(); + return scope.Escape(result.ToLocalChecked()); + } + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(JSON) +#if NAN_JSON_H_NEED_PARSE + Nan::Callback parse_cb_; +#endif // NAN_JSON_H_NEED_PARSE +#if NAN_JSON_H_NEED_STRINGIFY + Nan::Callback stringify_cb_; +#endif // NAN_JSON_H_NEED_STRINGIFY + +#if NAN_JSON_H_NEED_PARSE + inline v8::Local parse(v8::Local arg) { + assert(!parse_cb_.IsEmpty() && "parse_cb_ is empty"); + AsyncResource resource("nan:JSON.parse"); + return parse_cb_.Call(1, &arg, &resource).FromMaybe(v8::Local()); + } +#endif // NAN_JSON_H_NEED_PARSE + +#if NAN_JSON_H_NEED_STRINGIFY + inline v8::Local stringify(v8::Local arg) { + assert(!stringify_cb_.IsEmpty() && "stringify_cb_ is empty"); + AsyncResource resource("nan:JSON.stringify"); + return stringify_cb_.Call(1, &arg, &resource) + .FromMaybe(v8::Local()); + } + + inline v8::Local stringify(v8::Local arg, + v8::Local gap) { + assert(!stringify_cb_.IsEmpty() && "stringify_cb_ is empty"); + + v8::Local argv[] = { + arg, + Nan::Null(), + gap + }; + AsyncResource resource("nan:JSON.stringify"); + return stringify_cb_.Call(3, argv, &resource) + .FromMaybe(v8::Local()); + } +#endif // NAN_JSON_H_NEED_STRINGIFY +}; + +#endif // NAN_JSON_H_ diff --git a/node_modules/nan/nan_maybe_43_inl.h b/node_modules/nan/nan_maybe_43_inl.h new file mode 100644 index 00000000..c04ce30d --- /dev/null +++ b/node_modules/nan/nan_maybe_43_inl.h @@ -0,0 +1,356 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_MAYBE_43_INL_H_ +#define NAN_MAYBE_43_INL_H_ + +template +using MaybeLocal = v8::MaybeLocal; + +inline +MaybeLocal ToDetailString(v8::Local val) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(val->ToDetailString(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); +} + +inline +MaybeLocal ToArrayIndex(v8::Local val) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(val->ToArrayIndex(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); +} + +inline +Maybe Equals(v8::Local a, v8::Local(b)) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return a->Equals(isolate->GetCurrentContext(), b); +} + +inline +MaybeLocal NewInstance(v8::Local h) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(h->NewInstance(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); +} + +inline +MaybeLocal NewInstance( + v8::Local h + , int argc + , v8::Local argv[]) { + v8::Isolate *isolate = 
v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(h->NewInstance(isolate->GetCurrentContext(), argc, argv) + .FromMaybe(v8::Local())); +} + +inline +MaybeLocal NewInstance(v8::Local h) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(h->NewInstance(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); +} + + +inline MaybeLocal GetFunction( + v8::Local t) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(t->GetFunction(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); +} + +inline Maybe Set( + v8::Local obj + , v8::Local key + , v8::Local value) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->Set(isolate->GetCurrentContext(), key, value); +} + +inline Maybe Set( + v8::Local obj + , uint32_t index + , v8::Local value) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->Set(isolate->GetCurrentContext(), index, value); +} + +#if NODE_MODULE_VERSION < NODE_4_0_MODULE_VERSION +#include "nan_define_own_property_helper.h" // NOLINT(build/include) +#endif + +inline Maybe DefineOwnProperty( + v8::Local obj + , v8::Local key + , v8::Local value + , v8::PropertyAttribute attribs = v8::None) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); +#if NODE_MODULE_VERSION >= NODE_4_0_MODULE_VERSION + return obj->DefineOwnProperty(isolate->GetCurrentContext(), key, value, + attribs); +#else + Maybe maybeCurrent = + obj->GetPropertyAttributes(isolate->GetCurrentContext(), key); + if (maybeCurrent.IsNothing()) { + return Nothing(); + } + v8::PropertyAttribute current = maybeCurrent.FromJust(); + return imp::DefineOwnPropertyHelper(current, obj, key, value, attribs); +#endif +} + +NAN_DEPRECATED inline Maybe ForceSet( + v8::Local obj + , v8::Local key + , v8::Local value + , v8::PropertyAttribute attribs = v8::None) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); +#if NODE_MODULE_VERSION >= NODE_9_0_MODULE_VERSION + return key->IsName() + ? 
obj->DefineOwnProperty(isolate->GetCurrentContext(), + key.As(), value, attribs) + : Nothing(); +#else + return obj->ForceSet(isolate->GetCurrentContext(), key, value, attribs); +#endif +} + +inline MaybeLocal Get( + v8::Local obj + , v8::Local key) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(obj->Get(isolate->GetCurrentContext(), key) + .FromMaybe(v8::Local())); +} + +inline +MaybeLocal Get(v8::Local obj, uint32_t index) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(obj->Get(isolate->GetCurrentContext(), index) + .FromMaybe(v8::Local())); +} + +inline v8::PropertyAttribute GetPropertyAttributes( + v8::Local obj + , v8::Local key) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->GetPropertyAttributes(isolate->GetCurrentContext(), key) + .FromJust(); +} + +inline Maybe Has( + v8::Local obj + , v8::Local key) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->Has(isolate->GetCurrentContext(), key); +} + +inline Maybe Has(v8::Local obj, uint32_t index) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->Has(isolate->GetCurrentContext(), index); +} + +inline Maybe Delete( + v8::Local obj + , v8::Local key) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->Delete(isolate->GetCurrentContext(), key); +} + +inline +Maybe Delete(v8::Local obj, uint32_t index) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->Delete(isolate->GetCurrentContext(), index); +} + +inline +MaybeLocal GetPropertyNames(v8::Local obj) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(obj->GetPropertyNames(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); +} + +inline +MaybeLocal GetOwnPropertyNames(v8::Local obj) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(obj->GetOwnPropertyNames(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); +} + +inline Maybe SetPrototype( + v8::Local obj + , v8::Local prototype) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->SetPrototype(isolate->GetCurrentContext(), prototype); +} + +inline MaybeLocal ObjectProtoToString( + v8::Local obj) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(obj->ObjectProtoToString(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); +} + +inline Maybe HasOwnProperty( + v8::Local obj + , v8::Local key) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->HasOwnProperty(isolate->GetCurrentContext(), key); +} + +inline Maybe HasRealNamedProperty( + v8::Local obj + , v8::Local key) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->HasRealNamedProperty(isolate->GetCurrentContext(), key); +} + +inline Maybe HasRealIndexedProperty( + v8::Local obj + , uint32_t index) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->HasRealIndexedProperty(isolate->GetCurrentContext(), index); +} + +inline Maybe HasRealNamedCallbackProperty( + v8::Local obj + , 
v8::Local key) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return obj->HasRealNamedCallbackProperty(isolate->GetCurrentContext(), key); +} + +inline MaybeLocal GetRealNamedPropertyInPrototypeChain( + v8::Local obj + , v8::Local key) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(obj->GetRealNamedPropertyInPrototypeChain( + isolate->GetCurrentContext(), key) + .FromMaybe(v8::Local())); +} + +inline MaybeLocal GetRealNamedProperty( + v8::Local obj + , v8::Local key) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape( + obj->GetRealNamedProperty(isolate->GetCurrentContext(), key) + .FromMaybe(v8::Local())); +} + +inline MaybeLocal CallAsFunction( + v8::Local obj + , v8::Local recv + , int argc + , v8::Local argv[]) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape( + obj->CallAsFunction(isolate->GetCurrentContext(), recv, argc, argv) + .FromMaybe(v8::Local())); +} + +inline MaybeLocal CallAsConstructor( + v8::Local obj + , int argc, v8::Local argv[]) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape( + obj->CallAsConstructor(isolate->GetCurrentContext(), argc, argv) + .FromMaybe(v8::Local())); +} + +inline +MaybeLocal GetSourceLine(v8::Local msg) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(msg->GetSourceLine(isolate->GetCurrentContext()) + .FromMaybe(v8::Local())); +} + +inline Maybe GetLineNumber(v8::Local msg) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return msg->GetLineNumber(isolate->GetCurrentContext()); +} + +inline Maybe GetStartColumn(v8::Local msg) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return msg->GetStartColumn(isolate->GetCurrentContext()); +} + +inline Maybe GetEndColumn(v8::Local msg) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::HandleScope scope(isolate); + return msg->GetEndColumn(isolate->GetCurrentContext()); +} + +inline MaybeLocal CloneElementAt( + v8::Local array + , uint32_t index) { +#if (NODE_MODULE_VERSION >= NODE_6_0_MODULE_VERSION) + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + v8::Local context = isolate->GetCurrentContext(); + v8::Local elem; + v8::Local obj; + if (!array->Get(context, index).ToLocal(&elem)) { + return scope.Escape(obj); + } + if (!elem->ToObject(context).ToLocal(&obj)) { + return scope.Escape(v8::Local()); + } + return scope.Escape(obj->Clone()); +#else + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(array->CloneElementAt(isolate->GetCurrentContext(), index) + .FromMaybe(v8::Local())); +#endif +} + +inline MaybeLocal Call( + v8::Local fun + , v8::Local recv + , int argc + , v8::Local argv[]) { + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + return scope.Escape(fun->Call(isolate->GetCurrentContext(), recv, argc, argv) + .FromMaybe(v8::Local())); +} + +#endif // NAN_MAYBE_43_INL_H_ diff --git a/node_modules/nan/nan_maybe_pre_43_inl.h b/node_modules/nan/nan_maybe_pre_43_inl.h new file mode 100644 index 00000000..83325ae0 --- /dev/null +++ b/node_modules/nan/nan_maybe_pre_43_inl.h 
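The Maybe helpers above (Nan::Get, Nan::Set, Nan::Call and friends) wrap the context-taking V8 calls so addon code never reaches for GetCurrentContext() itself. A hedged usage sketch, assuming a plain object with a callable "cb" property; the function name and property name are made up for illustration:

#include <nan.h>

void InvokeCb(v8::Local<v8::Object> target) {
  Nan::HandleScope scope;
  v8::Local<v8::Value> cb;
  // Nan::Get returns MaybeLocal<Value>; bail out quietly if the lookup fails.
  if (!Nan::Get(target, Nan::New("cb").ToLocalChecked()).ToLocal(&cb) ||
      !cb->IsFunction()) {
    return;
  }
  v8::Local<v8::Value> argv[] = { Nan::New("ping").ToLocalChecked() };
  // Nan::Call also returns MaybeLocal<Value>; the result is unused here.
  Nan::MaybeLocal<v8::Value> ret =
      Nan::Call(cb.As<v8::Function>(), target, 1, argv);
  (void) ret;
}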
@@ -0,0 +1,268 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_MAYBE_PRE_43_INL_H_ +#define NAN_MAYBE_PRE_43_INL_H_ + +template +class MaybeLocal { + public: + inline MaybeLocal() : val_(v8::Local()) {} + + template +# if NODE_MODULE_VERSION >= NODE_0_12_MODULE_VERSION + inline + MaybeLocal(v8::Local that) : val_(that) {} // NOLINT(runtime/explicit) +# else + inline + MaybeLocal(v8::Local that) : // NOLINT(runtime/explicit) + val_(*reinterpret_cast*>(&that)) {} +# endif + + inline bool IsEmpty() const { return val_.IsEmpty(); } + + template + inline bool ToLocal(v8::Local *out) const { + *out = val_; + return !IsEmpty(); + } + + inline v8::Local ToLocalChecked() const { +#if defined(V8_ENABLE_CHECKS) + assert(!IsEmpty() && "ToLocalChecked is Empty"); +#endif // V8_ENABLE_CHECKS + return val_; + } + + template + inline v8::Local FromMaybe(v8::Local default_value) const { + return IsEmpty() ? default_value : v8::Local(val_); + } + + private: + v8::Local val_; +}; + +inline +MaybeLocal ToDetailString(v8::Handle val) { + return MaybeLocal(val->ToDetailString()); +} + +inline +MaybeLocal ToArrayIndex(v8::Handle val) { + return MaybeLocal(val->ToArrayIndex()); +} + +inline +Maybe Equals(v8::Handle a, v8::Handle(b)) { + return Just(a->Equals(b)); +} + +inline +MaybeLocal NewInstance(v8::Handle h) { + return MaybeLocal(h->NewInstance()); +} + +inline +MaybeLocal NewInstance( + v8::Local h + , int argc + , v8::Local argv[]) { + return MaybeLocal(h->NewInstance(argc, argv)); +} + +inline +MaybeLocal NewInstance(v8::Handle h) { + return MaybeLocal(h->NewInstance()); +} + +inline +MaybeLocal GetFunction(v8::Handle t) { + return MaybeLocal(t->GetFunction()); +} + +inline Maybe Set( + v8::Handle obj + , v8::Handle key + , v8::Handle value) { + return Just(obj->Set(key, value)); +} + +inline Maybe Set( + v8::Handle obj + , uint32_t index + , v8::Handle value) { + return Just(obj->Set(index, value)); +} + +#include "nan_define_own_property_helper.h" // NOLINT(build/include) + +inline Maybe DefineOwnProperty( + v8::Handle obj + , v8::Handle key + , v8::Handle value + , v8::PropertyAttribute attribs = v8::None) { + v8::PropertyAttribute current = obj->GetPropertyAttributes(key); + return imp::DefineOwnPropertyHelper(current, obj, key, value, attribs); +} + +NAN_DEPRECATED inline Maybe ForceSet( + v8::Handle obj + , v8::Handle key + , v8::Handle value + , v8::PropertyAttribute attribs = v8::None) { + return Just(obj->ForceSet(key, value, attribs)); +} + +inline MaybeLocal Get( + v8::Handle obj + , v8::Handle key) { + return MaybeLocal(obj->Get(key)); +} + +inline MaybeLocal Get( + v8::Handle obj + , uint32_t index) { + return MaybeLocal(obj->Get(index)); +} + +inline Maybe GetPropertyAttributes( + v8::Handle obj + , v8::Handle key) { + return Just(obj->GetPropertyAttributes(key)); +} + +inline Maybe Has( + v8::Handle obj + , v8::Handle key) { + return Just(obj->Has(key)); +} + +inline Maybe Has( + v8::Handle obj + , uint32_t index) { + return Just(obj->Has(index)); +} + +inline Maybe Delete( + v8::Handle obj + , v8::Handle key) { + return Just(obj->Delete(key)); +} + +inline Maybe Delete( + v8::Handle obj + , uint32_t index) { + return Just(obj->Delete(index)); +} + +inline +MaybeLocal GetPropertyNames(v8::Handle obj) { + return MaybeLocal(obj->GetPropertyNames()); +} + +inline +MaybeLocal 
GetOwnPropertyNames(v8::Handle obj) { + return MaybeLocal(obj->GetOwnPropertyNames()); +} + +inline Maybe SetPrototype( + v8::Handle obj + , v8::Handle prototype) { + return Just(obj->SetPrototype(prototype)); +} + +inline MaybeLocal ObjectProtoToString( + v8::Handle obj) { + return MaybeLocal(obj->ObjectProtoToString()); +} + +inline Maybe HasOwnProperty( + v8::Handle obj + , v8::Handle key) { + return Just(obj->HasOwnProperty(key)); +} + +inline Maybe HasRealNamedProperty( + v8::Handle obj + , v8::Handle key) { + return Just(obj->HasRealNamedProperty(key)); +} + +inline Maybe HasRealIndexedProperty( + v8::Handle obj + , uint32_t index) { + return Just(obj->HasRealIndexedProperty(index)); +} + +inline Maybe HasRealNamedCallbackProperty( + v8::Handle obj + , v8::Handle key) { + return Just(obj->HasRealNamedCallbackProperty(key)); +} + +inline MaybeLocal GetRealNamedPropertyInPrototypeChain( + v8::Handle obj + , v8::Handle key) { + return MaybeLocal( + obj->GetRealNamedPropertyInPrototypeChain(key)); +} + +inline MaybeLocal GetRealNamedProperty( + v8::Handle obj + , v8::Handle key) { + return MaybeLocal(obj->GetRealNamedProperty(key)); +} + +inline MaybeLocal CallAsFunction( + v8::Handle obj + , v8::Handle recv + , int argc + , v8::Handle argv[]) { + return MaybeLocal(obj->CallAsFunction(recv, argc, argv)); +} + +inline MaybeLocal CallAsConstructor( + v8::Handle obj + , int argc + , v8::Local argv[]) { + return MaybeLocal(obj->CallAsConstructor(argc, argv)); +} + +inline +MaybeLocal GetSourceLine(v8::Handle msg) { + return MaybeLocal(msg->GetSourceLine()); +} + +inline Maybe GetLineNumber(v8::Handle msg) { + return Just(msg->GetLineNumber()); +} + +inline Maybe GetStartColumn(v8::Handle msg) { + return Just(msg->GetStartColumn()); +} + +inline Maybe GetEndColumn(v8::Handle msg) { + return Just(msg->GetEndColumn()); +} + +inline MaybeLocal CloneElementAt( + v8::Handle array + , uint32_t index) { + return MaybeLocal(array->CloneElementAt(index)); +} + +inline MaybeLocal Call( + v8::Local fun + , v8::Local recv + , int argc + , v8::Local argv[]) { + return MaybeLocal(fun->Call(recv, argc, argv)); +} + +#endif // NAN_MAYBE_PRE_43_INL_H_ diff --git a/node_modules/nan/nan_new.h b/node_modules/nan/nan_new.h new file mode 100644 index 00000000..cdf8bbe4 --- /dev/null +++ b/node_modules/nan/nan_new.h @@ -0,0 +1,340 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_NEW_H_ +#define NAN_NEW_H_ + +namespace imp { // scnr + +// TODO(agnat): Generalize +template v8::Local To(v8::Local i); + +template <> +inline +v8::Local +To(v8::Local i) { + return Nan::To(i).ToLocalChecked(); +} + +template <> +inline +v8::Local +To(v8::Local i) { + return Nan::To(i).ToLocalChecked(); +} + +template <> +inline +v8::Local +To(v8::Local i) { + return Nan::To(i).ToLocalChecked(); +} + +template struct FactoryBase { + typedef v8::Local return_t; +}; + +template struct MaybeFactoryBase { + typedef MaybeLocal return_t; +}; + +template struct Factory; + +template <> +struct Factory : FactoryBase { + static inline return_t New(); + static inline return_t New(int length); +}; + +template <> +struct Factory : FactoryBase { + static inline return_t New(bool value); +}; + +template <> +struct Factory : FactoryBase { + static inline return_t New(bool value); +}; + +template <> +struct Factory : FactoryBase { + static 
inline + return_t + New( v8::ExtensionConfiguration* extensions = NULL + , v8::Local tmpl = v8::Local() + , v8::Local obj = v8::Local()); +}; + +template <> +struct Factory : MaybeFactoryBase { + static inline return_t New(double value); +}; + +template <> +struct Factory : FactoryBase { + static inline return_t New(void *value); +}; + +template <> +struct Factory : FactoryBase { + static inline + return_t + New( FunctionCallback callback + , v8::Local data = v8::Local()); +}; + +template <> +struct Factory : FactoryBase { + static inline + return_t + New( FunctionCallback callback = NULL + , v8::Local data = v8::Local() + , v8::Local signature = v8::Local()); +}; + +template <> +struct Factory : FactoryBase { + static inline return_t New(double value); +}; + +template <> +struct Factory : FactoryBase { + static inline return_t New(double value); +}; + +template +struct IntegerFactory : FactoryBase { + typedef typename FactoryBase::return_t return_t; + static inline return_t New(int32_t value); + static inline return_t New(uint32_t value); +}; + +template <> +struct Factory : IntegerFactory {}; + +template <> +struct Factory : IntegerFactory {}; + +template <> +struct Factory : FactoryBase { + static inline return_t New(int32_t value); + static inline return_t New(uint32_t value); +}; + +template <> +struct Factory : FactoryBase { + static inline return_t New(); +}; + +template <> +struct Factory : FactoryBase { + static inline return_t New(); +}; + +template <> +struct Factory : MaybeFactoryBase { + static inline return_t New( + v8::Local pattern, v8::RegExp::Flags flags); +}; + +template <> +struct Factory : MaybeFactoryBase { + static inline return_t New( v8::Local source); + static inline return_t New( v8::Local source + , v8::ScriptOrigin const& origin); +}; + +template <> +struct Factory : FactoryBase { + typedef v8::Local FTH; + static inline return_t New(FTH receiver = FTH()); +}; + +template <> +struct Factory : MaybeFactoryBase { + static inline return_t New(); + static inline return_t New(const char *value, int length = -1); + static inline return_t New(const uint16_t *value, int length = -1); + static inline return_t New(std::string const& value); + + static inline return_t New(v8::String::ExternalStringResource * value); + static inline return_t New(ExternalOneByteStringResource * value); +}; + +template <> +struct Factory : FactoryBase { + static inline return_t New(v8::Local value); +}; + +} // end of namespace imp + +#if (NODE_MODULE_VERSION >= 12) + +namespace imp { + +template <> +struct Factory : MaybeFactoryBase { + static inline return_t New( v8::Local source); + static inline return_t New( v8::Local source + , v8::ScriptOrigin const& origin); +}; + +} // end of namespace imp + +# include "nan_implementation_12_inl.h" + +#else // NODE_MODULE_VERSION >= 12 + +# include "nan_implementation_pre_12_inl.h" + +#endif + +//=== API ====================================================================== + +template +typename imp::Factory::return_t +New() { + return imp::Factory::New(); +} + +template +typename imp::Factory::return_t +New(A0 arg0) { + return imp::Factory::New(arg0); +} + +template +typename imp::Factory::return_t +New(A0 arg0, A1 arg1) { + return imp::Factory::New(arg0, arg1); +} + +template +typename imp::Factory::return_t +New(A0 arg0, A1 arg1, A2 arg2) { + return imp::Factory::New(arg0, arg1, arg2); +} + +template +typename imp::Factory::return_t +New(A0 arg0, A1 arg1, A2 arg2, A3 arg3) { + return imp::Factory::New(arg0, arg1, arg2, arg3); +} + +// 
Note(agnat): When passing overloaded function pointers to template functions +// as generic arguments the compiler needs help in picking the right overload. +// These two functions handle New and New with +// all argument variations. + +// v8::Function and v8::FunctionTemplate with one or two arguments +template +typename imp::Factory::return_t +New( FunctionCallback callback + , v8::Local data = v8::Local()) { + return imp::Factory::New(callback, data); +} + +// v8::Function and v8::FunctionTemplate with three arguments +template +typename imp::Factory::return_t +New( FunctionCallback callback + , v8::Local data = v8::Local() + , A2 a2 = A2()) { + return imp::Factory::New(callback, data, a2); +} + +// Convenience + +#if NODE_MODULE_VERSION < IOJS_3_0_MODULE_VERSION +template inline v8::Local New(v8::Handle h); +#endif + +#if NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION +template + inline v8::Local New(v8::Persistent const& p); +#else +template inline v8::Local New(v8::Persistent const& p); +#endif +template +inline v8::Local New(Persistent const& p); +template +inline v8::Local New(Global const& p); + +inline +imp::Factory::return_t +New(bool value) { + return New(value); +} + +inline +imp::Factory::return_t +New(int32_t value) { + return New(value); +} + +inline +imp::Factory::return_t +New(uint32_t value) { + return New(value); +} + +inline +imp::Factory::return_t +New(double value) { + return New(value); +} + +inline +imp::Factory::return_t +New(std::string const& value) { // NOLINT(build/include_what_you_use) + return New(value); +} + +inline +imp::Factory::return_t +New(const char * value, int length) { + return New(value, length); +} + +inline +imp::Factory::return_t +New(const uint16_t * value, int length) { + return New(value, length); +} + +inline +imp::Factory::return_t +New(const char * value) { + return New(value); +} + +inline +imp::Factory::return_t +New(const uint16_t * value) { + return New(value); +} + +inline +imp::Factory::return_t +New(v8::String::ExternalStringResource * value) { + return New(value); +} + +inline +imp::Factory::return_t +New(ExternalOneByteStringResource * value) { + return New(value); +} + +inline +imp::Factory::return_t +New(v8::Local pattern, v8::RegExp::Flags flags) { + return New(pattern, flags); +} + +#endif // NAN_NEW_H_ diff --git a/node_modules/nan/nan_object_wrap.h b/node_modules/nan/nan_object_wrap.h new file mode 100644 index 00000000..386affaa --- /dev/null +++ b/node_modules/nan/nan_object_wrap.h @@ -0,0 +1,156 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_OBJECT_WRAP_H_ +#define NAN_OBJECT_WRAP_H_ + +class ObjectWrap { + public: + ObjectWrap() { + refs_ = 0; + } + + + virtual ~ObjectWrap() { + if (persistent().IsEmpty()) { + return; + } + + persistent().ClearWeak(); + persistent().Reset(); + } + + + template + static inline T* Unwrap(v8::Local object) { + assert(!object.IsEmpty()); + assert(object->InternalFieldCount() > 0); + // Cast to ObjectWrap before casting to T. A direct cast from void + // to T won't work right when T has more than one base class. 
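    // Usage sketch (illustrative, not part of the vendored header): a wrapped
    // instance is typically recovered inside a NAN_METHOD body, e.g.
    //
    //   // MyObject is a hypothetical subclass of Nan::ObjectWrap.
    //   NAN_METHOD(MyObject::GetValue) {
    //     MyObject* self = Nan::ObjectWrap::Unwrap<MyObject>(info.Holder());
    //     info.GetReturnValue().Set(Nan::New<v8::Number>(self->value_));
    //   }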
+ void* ptr = GetInternalFieldPointer(object, 0); + ObjectWrap* wrap = static_cast(ptr); + return static_cast(wrap); + } + + + inline v8::Local handle() const { + return New(handle_); + } + + + inline Persistent& persistent() { + return handle_; + } + + + protected: + inline void Wrap(v8::Local object) { + assert(persistent().IsEmpty()); + assert(object->InternalFieldCount() > 0); + SetInternalFieldPointer(object, 0, this); + persistent().Reset(object); + MakeWeak(); + } + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) + + inline void MakeWeak() { + persistent().v8::PersistentBase::SetWeak( + this, WeakCallback, v8::WeakCallbackType::kParameter); +#if NODE_MAJOR_VERSION < 10 + // FIXME(bnoordhuis) Probably superfluous in older Node.js versions too. + persistent().MarkIndependent(); +#endif + } + +#elif NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + + inline void MakeWeak() { + persistent().v8::PersistentBase::SetWeak(this, WeakCallback); + persistent().MarkIndependent(); + } + +#else + + inline void MakeWeak() { + persistent().persistent.MakeWeak(this, WeakCallback); + persistent().MarkIndependent(); + } + +#endif + + /* Ref() marks the object as being attached to an event loop. + * Refed objects will not be garbage collected, even if + * all references are lost. + */ + virtual void Ref() { + assert(!persistent().IsEmpty()); + persistent().ClearWeak(); + refs_++; + } + + /* Unref() marks an object as detached from the event loop. This is its + * default state. When an object with a "weak" reference changes from + * attached to detached state it will be freed. Be careful not to access + * the object after making this call as it might be gone! + * (A "weak reference" means an object that only has a + * persistant handle.) 
+ * + * DO NOT CALL THIS FROM DESTRUCTOR + */ + virtual void Unref() { + assert(!persistent().IsEmpty()); + assert(!persistent().IsWeak()); + assert(refs_ > 0); + if (--refs_ == 0) + MakeWeak(); + } + + int refs_; // ro + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(ObjectWrap) +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) + + static void + WeakCallback(v8::WeakCallbackInfo const& info) { + ObjectWrap* wrap = info.GetParameter(); + assert(wrap->refs_ == 0); + wrap->handle_.Reset(); + delete wrap; + } + +#elif NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + + static void + WeakCallback(v8::WeakCallbackData const& data) { + ObjectWrap* wrap = data.GetParameter(); + assert(wrap->refs_ == 0); + assert(wrap->handle_.IsNearDeath()); + wrap->handle_.Reset(); + delete wrap; + } + +#else + + static void WeakCallback(v8::Persistent value, void *data) { + ObjectWrap *wrap = static_cast(data); + assert(wrap->refs_ == 0); + assert(wrap->handle_.IsNearDeath()); + wrap->handle_.Reset(); + delete wrap; + } + +#endif + Persistent handle_; +}; + + +#endif // NAN_OBJECT_WRAP_H_ diff --git a/node_modules/nan/nan_persistent_12_inl.h b/node_modules/nan/nan_persistent_12_inl.h new file mode 100644 index 00000000..d9649e86 --- /dev/null +++ b/node_modules/nan/nan_persistent_12_inl.h @@ -0,0 +1,132 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_PERSISTENT_12_INL_H_ +#define NAN_PERSISTENT_12_INL_H_ + +template class Persistent : + public v8::Persistent { + public: + inline Persistent() : v8::Persistent() {} + + template inline Persistent(v8::Local that) : + v8::Persistent(v8::Isolate::GetCurrent(), that) {} + + template + inline + Persistent(const v8::Persistent &that) : // NOLINT(runtime/explicit) + v8::Persistent(v8::Isolate::GetCurrent(), that) {} + + inline void Reset() { v8::PersistentBase::Reset(); } + + template + inline void Reset(const v8::Local &other) { + v8::PersistentBase::Reset(v8::Isolate::GetCurrent(), other); + } + + template + inline void Reset(const v8::PersistentBase &other) { + v8::PersistentBase::Reset(v8::Isolate::GetCurrent(), other); + } + + template + inline void SetWeak( + P *parameter + , typename WeakCallbackInfo
<P>
::Callback callback + , WeakCallbackType type); + + private: + inline T *operator*() const { return *PersistentBase::persistent; } + + template + inline void Copy(const Persistent &that) { + TYPE_CHECK(T, S); + + this->Reset(); + + if (!that.IsEmpty()) { + this->Reset(that); + M::Copy(that, this); + } + } +}; + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) +template +class Global : public v8::Global { + public: + inline Global() : v8::Global() {} + + template inline Global(v8::Local that) : + v8::Global(v8::Isolate::GetCurrent(), that) {} + + template + inline + Global(const v8::PersistentBase &that) : // NOLINT(runtime/explicit) + v8::Global(v8::Isolate::GetCurrent(), that) {} + + inline void Reset() { v8::PersistentBase::Reset(); } + + template + inline void Reset(const v8::Local &other) { + v8::PersistentBase::Reset(v8::Isolate::GetCurrent(), other); + } + + template + inline void Reset(const v8::PersistentBase &other) { + v8::PersistentBase::Reset(v8::Isolate::GetCurrent(), other); + } + + template + inline void SetWeak( + P *parameter + , typename WeakCallbackInfo
<P>
::Callback callback + , WeakCallbackType type) { + reinterpret_cast*>(this)->SetWeak( + parameter, callback, type); + } +}; +#else +template +class Global : public v8::UniquePersistent { + public: + inline Global() : v8::UniquePersistent() {} + + template inline Global(v8::Local that) : + v8::UniquePersistent(v8::Isolate::GetCurrent(), that) {} + + template + inline + Global(const v8::PersistentBase &that) : // NOLINT(runtime/explicit) + v8::UniquePersistent(v8::Isolate::GetCurrent(), that) {} + + inline void Reset() { v8::PersistentBase::Reset(); } + + template + inline void Reset(const v8::Local &other) { + v8::PersistentBase::Reset(v8::Isolate::GetCurrent(), other); + } + + template + inline void Reset(const v8::PersistentBase &other) { + v8::PersistentBase::Reset(v8::Isolate::GetCurrent(), other); + } + + template + inline void SetWeak( + P *parameter + , typename WeakCallbackInfo
<P>
::Callback callback + , WeakCallbackType type) { + reinterpret_cast*>(this)->SetWeak( + parameter, callback, type); + } +}; +#endif + +#endif // NAN_PERSISTENT_12_INL_H_ diff --git a/node_modules/nan/nan_persistent_pre_12_inl.h b/node_modules/nan/nan_persistent_pre_12_inl.h new file mode 100644 index 00000000..4c9c59da --- /dev/null +++ b/node_modules/nan/nan_persistent_pre_12_inl.h @@ -0,0 +1,242 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_PERSISTENT_PRE_12_INL_H_ +#define NAN_PERSISTENT_PRE_12_INL_H_ + +template +class PersistentBase { + v8::Persistent persistent; + template + friend v8::Local New(const PersistentBase &p); + template + friend v8::Local New(const Persistent &p); + template + friend v8::Local New(const Global &p); + template friend class ReturnValue; + + public: + inline PersistentBase() : + persistent() {} + + inline void Reset() { + persistent.Dispose(); + persistent.Clear(); + } + + template + inline void Reset(const v8::Local &other) { + TYPE_CHECK(T, S); + + if (!persistent.IsEmpty()) { + persistent.Dispose(); + } + + if (other.IsEmpty()) { + persistent.Clear(); + } else { + persistent = v8::Persistent::New(other); + } + } + + template + inline void Reset(const PersistentBase &other) { + TYPE_CHECK(T, S); + + if (!persistent.IsEmpty()) { + persistent.Dispose(); + } + + if (other.IsEmpty()) { + persistent.Clear(); + } else { + persistent = v8::Persistent::New(other.persistent); + } + } + + inline bool IsEmpty() const { return persistent.IsEmpty(); } + + inline void Empty() { persistent.Clear(); } + + template + inline bool operator==(const PersistentBase &that) const { + return this->persistent == that.persistent; + } + + template + inline bool operator==(const v8::Local &that) const { + return this->persistent == that; + } + + template + inline bool operator!=(const PersistentBase &that) const { + return !operator==(that); + } + + template + inline bool operator!=(const v8::Local &that) const { + return !operator==(that); + } + + template + inline void SetWeak( + P *parameter + , typename WeakCallbackInfo

::Callback callback + , WeakCallbackType type); + + inline void ClearWeak() { persistent.ClearWeak(); } + + inline void MarkIndependent() { persistent.MarkIndependent(); } + + inline bool IsIndependent() const { return persistent.IsIndependent(); } + + inline bool IsNearDeath() const { return persistent.IsNearDeath(); } + + inline bool IsWeak() const { return persistent.IsWeak(); } + + private: + inline explicit PersistentBase(v8::Persistent that) : + persistent(that) { } + inline explicit PersistentBase(T *val) : persistent(val) {} + template friend class Persistent; + template friend class Global; + friend class ObjectWrap; +}; + +template +class NonCopyablePersistentTraits { + public: + typedef Persistent > + NonCopyablePersistent; + static const bool kResetInDestructor = false; + template + inline static void Copy(const Persistent &source, + NonCopyablePersistent *dest) { + Uncompilable(); + } + + template inline static void Uncompilable() { + TYPE_CHECK(O, v8::Primitive); + } +}; + +template +struct CopyablePersistentTraits { + typedef Persistent > CopyablePersistent; + static const bool kResetInDestructor = true; + template + static inline void Copy(const Persistent &source, + CopyablePersistent *dest) {} +}; + +template class Persistent : + public PersistentBase { + public: + inline Persistent() {} + + template inline Persistent(v8::Handle that) + : PersistentBase(v8::Persistent::New(that)) { + TYPE_CHECK(T, S); + } + + inline Persistent(const Persistent &that) : PersistentBase() { + Copy(that); + } + + template + inline Persistent(const Persistent &that) : + PersistentBase() { + Copy(that); + } + + inline Persistent &operator=(const Persistent &that) { + Copy(that); + return *this; + } + + template + inline Persistent &operator=(const Persistent &that) { + Copy(that); + return *this; + } + + inline ~Persistent() { + if (M::kResetInDestructor) this->Reset(); + } + + private: + inline T *operator*() const { return *PersistentBase::persistent; } + + template + inline void Copy(const Persistent &that) { + TYPE_CHECK(T, S); + + this->Reset(); + + if (!that.IsEmpty()) { + this->persistent = v8::Persistent::New(that.persistent); + M::Copy(that, this); + } + } +}; + +template +class Global : public PersistentBase { + struct RValue { + inline explicit RValue(Global* obj) : object(obj) {} + Global* object; + }; + + public: + inline Global() : PersistentBase(0) { } + + template + inline Global(v8::Local that) // NOLINT(runtime/explicit) + : PersistentBase(v8::Persistent::New(that)) { + TYPE_CHECK(T, S); + } + + template + inline Global(const PersistentBase &that) // NOLINT(runtime/explicit) + : PersistentBase(that) { + TYPE_CHECK(T, S); + } + /** + * Move constructor. + */ + inline Global(RValue rvalue) // NOLINT(runtime/explicit) + : PersistentBase(rvalue.object->persistent) { + rvalue.object->Reset(); + } + inline ~Global() { this->Reset(); } + /** + * Move via assignment. + */ + template + inline Global &operator=(Global rhs) { + TYPE_CHECK(T, S); + this->Reset(rhs.persistent); + rhs.Reset(); + return *this; + } + /** + * Cast operator for moves. + */ + inline operator RValue() { return RValue(this); } + /** + * Pass allows returning uniques from functions, etc. 
+ */ + Global Pass() { return Global(RValue(this)); } + + private: + Global(Global &); + void operator=(Global &); + template friend class ReturnValue; +}; + +#endif // NAN_PERSISTENT_PRE_12_INL_H_ diff --git a/node_modules/nan/nan_private.h b/node_modules/nan/nan_private.h new file mode 100644 index 00000000..15f44cc8 --- /dev/null +++ b/node_modules/nan/nan_private.h @@ -0,0 +1,73 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_PRIVATE_H_ +#define NAN_PRIVATE_H_ + +inline Maybe +HasPrivate(v8::Local object, v8::Local key) { + HandleScope scope; +#if NODE_MODULE_VERSION >= NODE_6_0_MODULE_VERSION + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::Local context = isolate->GetCurrentContext(); + v8::Local private_key = v8::Private::ForApi(isolate, key); + return object->HasPrivate(context, private_key); +#else + return Just(!object->GetHiddenValue(key).IsEmpty()); +#endif +} + +inline MaybeLocal +GetPrivate(v8::Local object, v8::Local key) { +#if NODE_MODULE_VERSION >= NODE_6_0_MODULE_VERSION + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::EscapableHandleScope scope(isolate); + v8::Local context = isolate->GetCurrentContext(); + v8::Local private_key = v8::Private::ForApi(isolate, key); + v8::MaybeLocal v = object->GetPrivate(context, private_key); + return scope.Escape(v.ToLocalChecked()); +#else + EscapableHandleScope scope; + v8::Local v = object->GetHiddenValue(key); + if (v.IsEmpty()) { + v = Undefined(); + } + return scope.Escape(v); +#endif +} + +inline Maybe SetPrivate( + v8::Local object, + v8::Local key, + v8::Local value) { +#if NODE_MODULE_VERSION >= NODE_6_0_MODULE_VERSION + HandleScope scope; + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::Local context = isolate->GetCurrentContext(); + v8::Local private_key = v8::Private::ForApi(isolate, key); + return object->SetPrivate(context, private_key, value); +#else + return Just(object->SetHiddenValue(key, value)); +#endif +} + +inline Maybe DeletePrivate( + v8::Local object, + v8::Local key) { +#if NODE_MODULE_VERSION >= NODE_6_0_MODULE_VERSION + HandleScope scope; + v8::Isolate *isolate = v8::Isolate::GetCurrent(); + v8::Local private_key = v8::Private::ForApi(isolate, key); + return object->DeletePrivate(isolate->GetCurrentContext(), private_key); +#else + return Just(object->DeleteHiddenValue(key)); +#endif +} + +#endif // NAN_PRIVATE_H_ + diff --git a/node_modules/nan/nan_string_bytes.h b/node_modules/nan/nan_string_bytes.h new file mode 100644 index 00000000..a2e6437d --- /dev/null +++ b/node_modules/nan/nan_string_bytes.h @@ -0,0 +1,305 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +#ifndef NAN_STRING_BYTES_H_ +#define NAN_STRING_BYTES_H_ + +// Decodes a v8::Local or Buffer to a raw char* + +namespace imp { + +using v8::Local; +using v8::Object; +using v8::String; +using v8::Value; + + +//// Base 64 //// + +#define base64_encoded_size(size) ((size + 2 - ((size + 2) % 3)) / 3 * 4) + + + +//// HEX //// + +static bool contains_non_ascii_slow(const char* buf, size_t len) { + for (size_t i = 0; i < len; ++i) { + if (buf[i] & 0x80) return true; + } + return false; +} + + +static bool contains_non_ascii(const char* src, size_t len) { + if (len < 16) { + return contains_non_ascii_slow(src, len); + } + + const unsigned bytes_per_word = sizeof(void*); + const unsigned align_mask = bytes_per_word - 1; + const unsigned unaligned = reinterpret_cast(src) & align_mask; + + if (unaligned > 0) { + const unsigned n = bytes_per_word - unaligned; + if (contains_non_ascii_slow(src, n)) return true; + src += n; + len -= n; + } + + +#if defined(__x86_64__) || defined(_WIN64) + const uintptr_t mask = 0x8080808080808080ll; +#else + const uintptr_t mask = 0x80808080l; +#endif + + const uintptr_t* srcw = reinterpret_cast(src); + + for (size_t i = 0, n = len / bytes_per_word; i < n; ++i) { + if (srcw[i] & mask) return true; + } + + const unsigned remainder = len & align_mask; + if (remainder > 0) { + const size_t offset = len - remainder; + if (contains_non_ascii_slow(src + offset, remainder)) return true; + } + + return false; +} + + +static void force_ascii_slow(const char* src, char* dst, size_t len) { + for (size_t i = 0; i < len; ++i) { + dst[i] = src[i] & 0x7f; + } +} + + +static void force_ascii(const char* src, char* dst, size_t len) { + if (len < 16) { + force_ascii_slow(src, dst, len); + return; + } + + const unsigned bytes_per_word = sizeof(void*); + const unsigned align_mask = bytes_per_word - 1; + const unsigned src_unalign = reinterpret_cast(src) & align_mask; + const unsigned dst_unalign = reinterpret_cast(dst) & align_mask; + + if (src_unalign > 0) { + if (src_unalign == dst_unalign) { + const unsigned unalign = bytes_per_word - src_unalign; + force_ascii_slow(src, dst, unalign); + src += unalign; + dst += unalign; + len -= src_unalign; + } else { + force_ascii_slow(src, dst, len); + return; + } + } + +#if defined(__x86_64__) || defined(_WIN64) + const uintptr_t mask = ~0x8080808080808080ll; +#else + const uintptr_t mask = ~0x80808080l; +#endif + + const uintptr_t* srcw = reinterpret_cast(src); + uintptr_t* dstw = reinterpret_cast(dst); + + for (size_t i = 0, n = len / bytes_per_word; i < n; ++i) { + dstw[i] = srcw[i] & mask; + } + + const unsigned remainder = len & align_mask; + if (remainder > 0) { + const size_t offset = len - remainder; + force_ascii_slow(src + offset, dst + offset, remainder); + } +} + + +static size_t base64_encode(const char* src, + size_t slen, + char* dst, + size_t dlen) { + // We know how much we'll write, just make sure that there's space. 
+ assert(dlen >= base64_encoded_size(slen) && + "not enough space provided for base64 encode"); + + dlen = base64_encoded_size(slen); + + unsigned a; + unsigned b; + unsigned c; + unsigned i; + unsigned k; + unsigned n; + + static const char table[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "abcdefghijklmnopqrstuvwxyz" + "0123456789+/"; + + i = 0; + k = 0; + n = slen / 3 * 3; + + while (i < n) { + a = src[i + 0] & 0xff; + b = src[i + 1] & 0xff; + c = src[i + 2] & 0xff; + + dst[k + 0] = table[a >> 2]; + dst[k + 1] = table[((a & 3) << 4) | (b >> 4)]; + dst[k + 2] = table[((b & 0x0f) << 2) | (c >> 6)]; + dst[k + 3] = table[c & 0x3f]; + + i += 3; + k += 4; + } + + if (n != slen) { + switch (slen - n) { + case 1: + a = src[i + 0] & 0xff; + dst[k + 0] = table[a >> 2]; + dst[k + 1] = table[(a & 3) << 4]; + dst[k + 2] = '='; + dst[k + 3] = '='; + break; + + case 2: + a = src[i + 0] & 0xff; + b = src[i + 1] & 0xff; + dst[k + 0] = table[a >> 2]; + dst[k + 1] = table[((a & 3) << 4) | (b >> 4)]; + dst[k + 2] = table[(b & 0x0f) << 2]; + dst[k + 3] = '='; + break; + } + } + + return dlen; +} + + +static size_t hex_encode(const char* src, size_t slen, char* dst, size_t dlen) { + // We know how much we'll write, just make sure that there's space. + assert(dlen >= slen * 2 && + "not enough space provided for hex encode"); + + dlen = slen * 2; + for (uint32_t i = 0, k = 0; k < dlen; i += 1, k += 2) { + static const char hex[] = "0123456789abcdef"; + uint8_t val = static_cast(src[i]); + dst[k + 0] = hex[val >> 4]; + dst[k + 1] = hex[val & 15]; + } + + return dlen; +} + + + +static Local Encode(const char* buf, + size_t buflen, + enum Encoding encoding) { + assert(buflen <= node::Buffer::kMaxLength); + if (!buflen && encoding != BUFFER) + return New("").ToLocalChecked(); + + Local val; + switch (encoding) { + case BUFFER: + return CopyBuffer(buf, buflen).ToLocalChecked(); + + case ASCII: + if (contains_non_ascii(buf, buflen)) { + char* out = new char[buflen]; + force_ascii(buf, out, buflen); + val = New(out, buflen).ToLocalChecked(); + delete[] out; + } else { + val = New(buf, buflen).ToLocalChecked(); + } + break; + + case UTF8: + val = New(buf, buflen).ToLocalChecked(); + break; + + case BINARY: { + // TODO(isaacs) use ExternalTwoByteString? + const unsigned char *cbuf = reinterpret_cast(buf); + uint16_t * twobytebuf = new uint16_t[buflen]; + for (size_t i = 0; i < buflen; i++) { + // XXX is the following line platform independent? 
+ twobytebuf[i] = cbuf[i]; + } + val = New(twobytebuf, buflen).ToLocalChecked(); + delete[] twobytebuf; + break; + } + + case BASE64: { + size_t dlen = base64_encoded_size(buflen); + char* dst = new char[dlen]; + + size_t written = base64_encode(buf, buflen, dst, dlen); + assert(written == dlen); + + val = New(dst, dlen).ToLocalChecked(); + delete[] dst; + break; + } + + case UCS2: { + const uint16_t* data = reinterpret_cast(buf); + val = New(data, buflen / 2).ToLocalChecked(); + break; + } + + case HEX: { + size_t dlen = buflen * 2; + char* dst = new char[dlen]; + size_t written = hex_encode(buf, buflen, dst, dlen); + assert(written == dlen); + + val = New(dst, dlen).ToLocalChecked(); + delete[] dst; + break; + } + + default: + assert(0 && "unknown encoding"); + break; + } + + return val; +} + +#undef base64_encoded_size + +} // end of namespace imp + +#endif // NAN_STRING_BYTES_H_ diff --git a/node_modules/nan/nan_typedarray_contents.h b/node_modules/nan/nan_typedarray_contents.h new file mode 100644 index 00000000..d28ae323 --- /dev/null +++ b/node_modules/nan/nan_typedarray_contents.h @@ -0,0 +1,90 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_TYPEDARRAY_CONTENTS_H_ +#define NAN_TYPEDARRAY_CONTENTS_H_ + +template +class TypedArrayContents { + public: + inline explicit TypedArrayContents(v8::Local from) : + length_(0), data_(NULL) { + HandleScope scope; + + size_t length = 0; + void* data = NULL; + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) + + if (from->IsArrayBufferView()) { + v8::Local array = + v8::Local::Cast(from); + + const size_t byte_length = array->ByteLength(); + const ptrdiff_t byte_offset = array->ByteOffset(); + v8::Local buffer = array->Buffer(); + + length = byte_length / sizeof(T); + data = static_cast(buffer->GetContents().Data()) + byte_offset; + } + +#else + + if (from->IsObject() && !from->IsNull()) { + v8::Local array = v8::Local::Cast(from); + + MaybeLocal buffer = Get(array, + New("buffer").ToLocalChecked()); + MaybeLocal byte_length = Get(array, + New("byteLength").ToLocalChecked()); + MaybeLocal byte_offset = Get(array, + New("byteOffset").ToLocalChecked()); + + if (!buffer.IsEmpty() && + !byte_length.IsEmpty() && byte_length.ToLocalChecked()->IsUint32() && + !byte_offset.IsEmpty() && byte_offset.ToLocalChecked()->IsUint32()) { + data = array->GetIndexedPropertiesExternalArrayData(); + if(data) { + length = byte_length.ToLocalChecked()->Uint32Value() / sizeof(T); + } + } + } + +#endif + +#if defined(_MSC_VER) && _MSC_VER >= 1900 || __cplusplus >= 201103L + assert(reinterpret_cast(data) % alignof (T) == 0); +#elif defined(_MSC_VER) && _MSC_VER >= 1600 || defined(__GNUC__) + assert(reinterpret_cast(data) % __alignof(T) == 0); +#else + assert(reinterpret_cast(data) % sizeof (T) == 0); +#endif + + length_ = length; + data_ = static_cast(data); + } + + inline size_t length() const { return length_; } + inline T* operator*() { return data_; } + inline const T* operator*() const { return data_; } + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(TypedArrayContents) + + //Disable heap allocation + void *operator new(size_t size); + void operator delete(void *, size_t) { + abort(); + } + + size_t length_; + T* data_; +}; + +#endif // NAN_TYPEDARRAY_CONTENTS_H_ 
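The `TypedArrayContents` helper above is easiest to understand from the consumer side. The sketch below is illustrative only and is not part of the patched headers: a hypothetical addon method `Sum` that assumes its first argument is a `Float64Array` and reads the elements through `Nan::TypedArrayContents`.

```cpp
// Illustrative sketch only -- a hypothetical NAN addon method.
#include <nan.h>

// Sums the elements of a Float64Array passed as the first argument.
NAN_METHOD(Sum) {
  // Borrow a typed view of info[0]; length() is 0 and the pointer is NULL
  // when the argument is not an ArrayBufferView.
  Nan::TypedArrayContents<double> elements(info[0]);

  double sum = 0;
  for (size_t i = 0; i < elements.length(); i++) {
    sum += (*elements)[i];  // operator* exposes the underlying T* data
  }

  info.GetReturnValue().Set(Nan::New<v8::Number>(sum));
}
```

Note that the wrapper only borrows the view's backing memory (buffer data plus byte offset, as computed above); nothing is copied, so the pointer is only valid while the source ArrayBuffer stays alive.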
diff --git a/node_modules/nan/nan_weak.h b/node_modules/nan/nan_weak.h new file mode 100644 index 00000000..7e7ab07b --- /dev/null +++ b/node_modules/nan/nan_weak.h @@ -0,0 +1,437 @@ +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +#ifndef NAN_WEAK_H_ +#define NAN_WEAK_H_ + +static const int kInternalFieldsInWeakCallback = 2; +static const int kNoInternalFieldIndex = -1; + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) +# define NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ \ + v8::WeakCallbackInfo > const& +# define NAN_WEAK_TWOFIELD_CALLBACK_DATA_TYPE_ \ + NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ +# define NAN_WEAK_PARAMETER_CALLBACK_SIG_ NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ +# define NAN_WEAK_TWOFIELD_CALLBACK_SIG_ NAN_WEAK_TWOFIELD_CALLBACK_DATA_TYPE_ +#elif NODE_MODULE_VERSION > IOJS_1_1_MODULE_VERSION +# define NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ \ + v8::PhantomCallbackData > const& +# define NAN_WEAK_TWOFIELD_CALLBACK_DATA_TYPE_ \ + NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ +# define NAN_WEAK_PARAMETER_CALLBACK_SIG_ NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ +# define NAN_WEAK_TWOFIELD_CALLBACK_SIG_ NAN_WEAK_TWOFIELD_CALLBACK_DATA_TYPE_ +#elif NODE_MODULE_VERSION > NODE_0_12_MODULE_VERSION +# define NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ \ + v8::PhantomCallbackData > const& +# define NAN_WEAK_TWOFIELD_CALLBACK_DATA_TYPE_ \ + v8::InternalFieldsCallbackData, void> const& +# define NAN_WEAK_PARAMETER_CALLBACK_SIG_ NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ +# define NAN_WEAK_TWOFIELD_CALLBACK_SIG_ NAN_WEAK_TWOFIELD_CALLBACK_DATA_TYPE_ +#elif NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION +# define NAN_WEAK_CALLBACK_DATA_TYPE_ \ + v8::WeakCallbackData > const& +# define NAN_WEAK_CALLBACK_SIG_ NAN_WEAK_CALLBACK_DATA_TYPE_ +#else +# define NAN_WEAK_CALLBACK_DATA_TYPE_ void * +# define NAN_WEAK_CALLBACK_SIG_ \ + v8::Persistent, NAN_WEAK_CALLBACK_DATA_TYPE_ +#endif + +template +class WeakCallbackInfo { + public: + typedef void (*Callback)(const WeakCallbackInfo& data); + WeakCallbackInfo( + Persistent *persistent + , Callback callback + , void *parameter + , void *field1 = 0 + , void *field2 = 0) : + callback_(callback), isolate_(0), parameter_(parameter) { + std::memcpy(&persistent_, persistent, sizeof (v8::Persistent)); + internal_fields_[0] = field1; + internal_fields_[1] = field2; + } + inline v8::Isolate *GetIsolate() const { return isolate_; } + inline T *GetParameter() const { return static_cast(parameter_); } + inline void *GetInternalField(int index) const { + assert((index == 0 || index == 1) && "internal field index out of bounds"); + if (index == 0) { + return internal_fields_[0]; + } else { + return internal_fields_[1]; + } + } + + private: + NAN_DISALLOW_ASSIGN_COPY_MOVE(WeakCallbackInfo) + Callback callback_; + v8::Isolate *isolate_; + void *parameter_; + void *internal_fields_[kInternalFieldsInWeakCallback]; + v8::Persistent persistent_; + template friend class Persistent; + template friend class PersistentBase; +#if NODE_MODULE_VERSION <= NODE_0_12_MODULE_VERSION +# if NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + template + static void invoke(NAN_WEAK_CALLBACK_SIG_ data); + template + static WeakCallbackInfo *unwrap(NAN_WEAK_CALLBACK_DATA_TYPE_ data); +# else + static void 
invoke(NAN_WEAK_CALLBACK_SIG_ data); + static WeakCallbackInfo *unwrap(NAN_WEAK_CALLBACK_DATA_TYPE_ data); +# endif +#else +# if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) + template + static void invokeparameter(NAN_WEAK_PARAMETER_CALLBACK_SIG_ data); + template + static void invoketwofield(NAN_WEAK_TWOFIELD_CALLBACK_SIG_ data); +# else + static void invokeparameter(NAN_WEAK_PARAMETER_CALLBACK_SIG_ data); + static void invoketwofield(NAN_WEAK_TWOFIELD_CALLBACK_SIG_ data); +# endif + static WeakCallbackInfo *unwrapparameter( + NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ data); + static WeakCallbackInfo *unwraptwofield( + NAN_WEAK_TWOFIELD_CALLBACK_DATA_TYPE_ data); +#endif +}; + + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) + +template +template +void +WeakCallbackInfo::invokeparameter(NAN_WEAK_PARAMETER_CALLBACK_SIG_ data) { + WeakCallbackInfo *cbinfo = unwrapparameter(data); + if (isFirstPass) { + cbinfo->persistent_.Reset(); + data.SetSecondPassCallback(invokeparameter); + } else { + cbinfo->callback_(*cbinfo); + delete cbinfo; + } +} + +template +template +void +WeakCallbackInfo::invoketwofield(NAN_WEAK_TWOFIELD_CALLBACK_SIG_ data) { + WeakCallbackInfo *cbinfo = unwraptwofield(data); + if (isFirstPass) { + cbinfo->persistent_.Reset(); + data.SetSecondPassCallback(invoketwofield); + } else { + cbinfo->callback_(*cbinfo); + delete cbinfo; + } +} + +template +WeakCallbackInfo *WeakCallbackInfo::unwrapparameter( + NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ data) { + WeakCallbackInfo *cbinfo = + static_cast*>(data.GetParameter()); + cbinfo->isolate_ = data.GetIsolate(); + return cbinfo; +} + +template +WeakCallbackInfo *WeakCallbackInfo::unwraptwofield( + NAN_WEAK_TWOFIELD_CALLBACK_DATA_TYPE_ data) { + WeakCallbackInfo *cbinfo = + static_cast*>(data.GetInternalField(0)); + cbinfo->isolate_ = data.GetIsolate(); + return cbinfo; +} + +#undef NAN_WEAK_PARAMETER_CALLBACK_SIG_ +#undef NAN_WEAK_TWOFIELD_CALLBACK_SIG_ +#undef NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ +#undef NAN_WEAK_TWOFIELD_CALLBACK_DATA_TYPE_ +# elif NODE_MODULE_VERSION > NODE_0_12_MODULE_VERSION + +template +void +WeakCallbackInfo::invokeparameter(NAN_WEAK_PARAMETER_CALLBACK_SIG_ data) { + WeakCallbackInfo *cbinfo = unwrapparameter(data); + cbinfo->persistent_.Reset(); + cbinfo->callback_(*cbinfo); + delete cbinfo; +} + +template +void +WeakCallbackInfo::invoketwofield(NAN_WEAK_TWOFIELD_CALLBACK_SIG_ data) { + WeakCallbackInfo *cbinfo = unwraptwofield(data); + cbinfo->persistent_.Reset(); + cbinfo->callback_(*cbinfo); + delete cbinfo; +} + +template +WeakCallbackInfo *WeakCallbackInfo::unwrapparameter( + NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ data) { + WeakCallbackInfo *cbinfo = + static_cast*>(data.GetParameter()); + cbinfo->isolate_ = data.GetIsolate(); + return cbinfo; +} + +template +WeakCallbackInfo *WeakCallbackInfo::unwraptwofield( + NAN_WEAK_TWOFIELD_CALLBACK_DATA_TYPE_ data) { + WeakCallbackInfo *cbinfo = + static_cast*>(data.GetInternalField1()); + cbinfo->isolate_ = data.GetIsolate(); + return cbinfo; +} + +#undef NAN_WEAK_PARAMETER_CALLBACK_SIG_ +#undef NAN_WEAK_TWOFIELD_CALLBACK_SIG_ +#undef NAN_WEAK_PARAMETER_CALLBACK_DATA_TYPE_ +#undef NAN_WEAK_TWOFIELD_CALLBACK_DATA_TYPE_ +#elif NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION + +template +template +void WeakCallbackInfo::invoke(NAN_WEAK_CALLBACK_SIG_ data) { + WeakCallbackInfo *cbinfo 
= unwrap(data); + cbinfo->persistent_.Reset(); + cbinfo->callback_(*cbinfo); + delete cbinfo; +} + +template +template +WeakCallbackInfo *WeakCallbackInfo::unwrap( + NAN_WEAK_CALLBACK_DATA_TYPE_ data) { + void *parameter = data.GetParameter(); + WeakCallbackInfo *cbinfo = + static_cast*>(parameter); + cbinfo->isolate_ = data.GetIsolate(); + return cbinfo; +} + +#undef NAN_WEAK_CALLBACK_SIG_ +#undef NAN_WEAK_CALLBACK_DATA_TYPE_ +#else + +template +void WeakCallbackInfo::invoke(NAN_WEAK_CALLBACK_SIG_ data) { + WeakCallbackInfo *cbinfo = unwrap(data); + cbinfo->persistent_.Dispose(); + cbinfo->persistent_.Clear(); + cbinfo->callback_(*cbinfo); + delete cbinfo; +} + +template +WeakCallbackInfo *WeakCallbackInfo::unwrap( + NAN_WEAK_CALLBACK_DATA_TYPE_ data) { + WeakCallbackInfo *cbinfo = + static_cast*>(data); + cbinfo->isolate_ = v8::Isolate::GetCurrent(); + return cbinfo; +} + +#undef NAN_WEAK_CALLBACK_SIG_ +#undef NAN_WEAK_CALLBACK_DATA_TYPE_ +#endif + +#if defined(V8_MAJOR_VERSION) && (V8_MAJOR_VERSION > 4 || \ + (V8_MAJOR_VERSION == 4 && defined(V8_MINOR_VERSION) && V8_MINOR_VERSION >= 3)) +template +template +inline void Persistent::SetWeak( + P *parameter + , typename WeakCallbackInfo

::Callback callback + , WeakCallbackType type) { + WeakCallbackInfo

*wcbd; + if (type == WeakCallbackType::kParameter) { + wcbd = new WeakCallbackInfo

( + reinterpret_cast*>(this) + , callback + , parameter); + v8::PersistentBase::SetWeak( + wcbd + , WeakCallbackInfo

::template invokeparameter + , type); + } else { + v8::Local* self_v(reinterpret_cast*>(this)); + assert((*self_v)->IsObject()); + v8::Local self((*self_v).As()); + int count = self->InternalFieldCount(); + void *internal_fields[kInternalFieldsInWeakCallback] = {0, 0}; + for (int i = 0; i < count && i < kInternalFieldsInWeakCallback; i++) { + internal_fields[i] = self->GetAlignedPointerFromInternalField(i); + } + wcbd = new WeakCallbackInfo

( + reinterpret_cast*>(this) + , callback + , 0 + , internal_fields[0] + , internal_fields[1]); + self->SetAlignedPointerInInternalField(0, wcbd); + v8::PersistentBase::SetWeak( + static_cast*>(0) + , WeakCallbackInfo

::template invoketwofield + , type); + } +} +#elif NODE_MODULE_VERSION > IOJS_1_1_MODULE_VERSION +template +template +inline void Persistent::SetWeak( + P *parameter + , typename WeakCallbackInfo

::Callback callback + , WeakCallbackType type) { + WeakCallbackInfo

*wcbd; + if (type == WeakCallbackType::kParameter) { + wcbd = new WeakCallbackInfo

( + reinterpret_cast*>(this) + , callback + , parameter); + v8::PersistentBase::SetPhantom( + wcbd + , WeakCallbackInfo

::invokeparameter); + } else { + v8::Local* self_v(reinterpret_cast*>(this)); + assert((*self_v)->IsObject()); + v8::Local self((*self_v).As()); + int count = self->InternalFieldCount(); + void *internal_fields[kInternalFieldsInWeakCallback] = {0, 0}; + for (int i = 0; i < count && i < kInternalFieldsInWeakCallback; i++) { + internal_fields[i] = self->GetAlignedPointerFromInternalField(i); + } + wcbd = new WeakCallbackInfo

( + reinterpret_cast*>(this) + , callback + , 0 + , internal_fields[0] + , internal_fields[1]); + self->SetAlignedPointerInInternalField(0, wcbd); + v8::PersistentBase::SetPhantom( + static_cast*>(0) + , WeakCallbackInfo

::invoketwofield + , 0 + , count > 1 ? 1 : kNoInternalFieldIndex); + } +} +#elif NODE_MODULE_VERSION > NODE_0_12_MODULE_VERSION +template +template +inline void Persistent::SetWeak( + P *parameter + , typename WeakCallbackInfo

::Callback callback + , WeakCallbackType type) { + WeakCallbackInfo

*wcbd; + if (type == WeakCallbackType::kParameter) { + wcbd = new WeakCallbackInfo

( + reinterpret_cast*>(this) + , callback + , parameter); + v8::PersistentBase::SetPhantom( + wcbd + , WeakCallbackInfo

::invokeparameter); + } else { + v8::Local* self_v(reinterpret_cast*>(this)); + assert((*self_v)->IsObject()); + v8::Local self((*self_v).As()); + int count = self->InternalFieldCount(); + void *internal_fields[kInternalFieldsInWeakCallback] = {0, 0}; + for (int i = 0; i < count && i < kInternalFieldsInWeakCallback; i++) { + internal_fields[i] = self->GetAlignedPointerFromInternalField(i); + } + wcbd = new WeakCallbackInfo

( + reinterpret_cast*>(this) + , callback + , 0 + , internal_fields[0] + , internal_fields[1]); + self->SetAlignedPointerInInternalField(0, wcbd); + v8::PersistentBase::SetPhantom( + WeakCallbackInfo

::invoketwofield + , 0 + , count > 1 ? 1 : kNoInternalFieldIndex); + } +} +#elif NODE_MODULE_VERSION > NODE_0_10_MODULE_VERSION +template +template +inline void Persistent::SetWeak( + P *parameter + , typename WeakCallbackInfo

::Callback callback + , WeakCallbackType type) { + WeakCallbackInfo

*wcbd; + if (type == WeakCallbackType::kParameter) { + wcbd = new WeakCallbackInfo

( + reinterpret_cast*>(this) + , callback + , parameter); + v8::PersistentBase::SetWeak(wcbd, WeakCallbackInfo

::invoke); + } else { + v8::Local* self_v(reinterpret_cast*>(this)); + assert((*self_v)->IsObject()); + v8::Local self((*self_v).As()); + int count = self->InternalFieldCount(); + void *internal_fields[kInternalFieldsInWeakCallback] = {0, 0}; + for (int i = 0; i < count && i < kInternalFieldsInWeakCallback; i++) { + internal_fields[i] = self->GetAlignedPointerFromInternalField(i); + } + wcbd = new WeakCallbackInfo

( + reinterpret_cast*>(this) + , callback + , 0 + , internal_fields[0] + , internal_fields[1]); + v8::PersistentBase::SetWeak(wcbd, WeakCallbackInfo

::invoke); + } +} +#else +template +template +inline void PersistentBase::SetWeak( + P *parameter + , typename WeakCallbackInfo

::Callback callback + , WeakCallbackType type) { + WeakCallbackInfo

*wcbd; + if (type == WeakCallbackType::kParameter) { + wcbd = new WeakCallbackInfo

( + reinterpret_cast*>(this) + , callback + , parameter); + persistent.MakeWeak(wcbd, WeakCallbackInfo

::invoke); + } else { + v8::Local* self_v(reinterpret_cast*>(this)); + assert((*self_v)->IsObject()); + v8::Local self((*self_v).As()); + int count = self->InternalFieldCount(); + void *internal_fields[kInternalFieldsInWeakCallback] = {0, 0}; + for (int i = 0; i < count && i < kInternalFieldsInWeakCallback; i++) { + internal_fields[i] = self->GetPointerFromInternalField(i); + } + wcbd = new WeakCallbackInfo

( + reinterpret_cast*>(this) + , callback + , 0 + , internal_fields[0] + , internal_fields[1]); + persistent.MakeWeak(wcbd, WeakCallbackInfo

::invoke); + } +} +#endif + +#endif // NAN_WEAK_H_ diff --git a/node_modules/nan/package.json b/node_modules/nan/package.json new file mode 100644 index 00000000..7d4830cd --- /dev/null +++ b/node_modules/nan/package.json @@ -0,0 +1,37 @@ +{ + "name": "nan", + "version": "2.14.0", + "description": "Native Abstractions for Node.js: C++ header for Node 0.8 -> 11 compatibility", + "main": "include_dirs.js", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/nan.git" + }, + "scripts": { + "test": "tap --gc --stderr test/js/*-test.js", + "test:worker": "node --experimental-worker test/tap-as-worker.js --gc --stderr test/js/*-test.js", + "rebuild-tests": "node-gyp rebuild --msvs_version=2015 --directory test", + "docs": "doc/.build.sh" + }, + "contributors": [ + "Rod Vagg (https://github.com/rvagg)", + "Benjamin Byholm (https://github.com/kkoopa/)", + "Trevor Norris (https://github.com/trevnorris)", + "Nathan Rajlich (https://github.com/TooTallNate)", + "Brett Lawson (https://github.com/brett19)", + "Ben Noordhuis (https://github.com/bnoordhuis)", + "David Siegel (https://github.com/agnat)", + "Michael Ira Krufky (https://github.com/mkrufky)" + ], + "devDependencies": { + "bindings": "~1.2.1", + "commander": "^2.8.1", + "glob": "^5.0.14", + "request": "=2.81.0", + "node-gyp": "~3.6.2", + "readable-stream": "^2.1.4", + "tap": "~0.7.1", + "xtend": "~4.0.0" + }, + "license": "MIT" +} diff --git a/node_modules/nan/tools/1to2.js b/node_modules/nan/tools/1to2.js new file mode 100755 index 00000000..337f8bf2 --- /dev/null +++ b/node_modules/nan/tools/1to2.js @@ -0,0 +1,412 @@ +#!/usr/bin/env node +/********************************************************************* + * NAN - Native Abstractions for Node.js + * + * Copyright (c) 2018 NAN contributors + * + * MIT License + ********************************************************************/ + +var commander = require('commander'), + fs = require('fs'), + glob = require('glob'), + groups = [], + total = 0, + warning1 = '/* ERROR: Rewrite using Buffer */\n', + warning2 = '\\/\\* ERROR\\: Rewrite using Buffer \\*\\/\\n', + length, + i; + +fs.readFile(__dirname + '/package.json', 'utf8', function (err, data) { + if (err) { + throw err; + } + + commander + .version(JSON.parse(data).version) + .usage('[options] ') + .parse(process.argv); + + if (!process.argv.slice(2).length) { + commander.outputHelp(); + } +}); + +/* construct strings representing regular expressions + each expression contains a unique group allowing for identification of the match + the index of this key group, relative to the regular expression in question, + is indicated by the first array member */ + +/* simple substistutions, key group is the entire match, 0 */ +groups.push([0, [ + '_NAN_', + 'NODE_SET_METHOD', + 'NODE_SET_PROTOTYPE_METHOD', + 'NanAsciiString', + 'NanEscapeScope', + 'NanReturnValue', + 'NanUcs2String'].join('|')]); + +/* substitutions of parameterless macros, key group is 1 */ +groups.push([1, ['(', [ + 'NanEscapableScope', + 'NanReturnNull', + 'NanReturnUndefined', + 'NanScope'].join('|'), ')\\(\\)'].join('')]); + +/* replace TryCatch with NanTryCatch once, gobbling possible namespace, key group 2 */ +groups.push([2, '(?:(?:v8\\:\\:)?|(Nan)?)(TryCatch)']); + +/* NanNew("string") will likely not fail a ToLocalChecked(), key group 1 */ +groups.push([1, ['(NanNew)', '(\\("[^\\"]*"[^\\)]*\\))(?!\\.ToLocalChecked\\(\\))'].join('')]); + +/* Removed v8 APIs, warn that the code needs rewriting using node::Buffer, key group 2 */ +groups.push([2, ['(', 
warning2, ')?', '^.*?(', [ + 'GetIndexedPropertiesExternalArrayDataLength', + 'GetIndexedPropertiesExternalArrayData', + 'GetIndexedPropertiesExternalArrayDataType', + 'GetIndexedPropertiesPixelData', + 'GetIndexedPropertiesPixelDataLength', + 'HasIndexedPropertiesInExternalArrayData', + 'HasIndexedPropertiesInPixelData', + 'SetIndexedPropertiesToExternalArrayData', + 'SetIndexedPropertiesToPixelData'].join('|'), ')'].join('')]); + +/* No need for NanScope in V8-exposed methods, key group 2 */ +groups.push([2, ['((', [ + 'NAN_METHOD', + 'NAN_GETTER', + 'NAN_SETTER', + 'NAN_PROPERTY_GETTER', + 'NAN_PROPERTY_SETTER', + 'NAN_PROPERTY_ENUMERATOR', + 'NAN_PROPERTY_DELETER', + 'NAN_PROPERTY_QUERY', + 'NAN_INDEX_GETTER', + 'NAN_INDEX_SETTER', + 'NAN_INDEX_ENUMERATOR', + 'NAN_INDEX_DELETER', + 'NAN_INDEX_QUERY'].join('|'), ')\\([^\\)]*\\)\\s*\\{)\\s*NanScope\\(\\)\\s*;'].join('')]); + +/* v8::Value::ToXXXXXXX returns v8::MaybeLocal, key group 3 */ +groups.push([3, ['([\\s\\(\\)])([^\\s\\(\\)]+)->(', [ + 'Boolean', + 'Number', + 'String', + 'Object', + 'Integer', + 'Uint32', + 'Int32'].join('|'), ')\\('].join('')]); + +/* v8::Value::XXXXXXXValue returns v8::Maybe, key group 3 */ +groups.push([3, ['([\\s\\(\\)])([^\\s\\(\\)]+)->((?:', [ + 'Boolean', + 'Number', + 'Integer', + 'Uint32', + 'Int32'].join('|'), ')Value)\\('].join('')]); + +/* NAN_WEAK_CALLBACK macro was removed, write out callback definition, key group 1 */ +groups.push([1, '(NAN_WEAK_CALLBACK)\\(([^\\s\\)]+)\\)']); + +/* node::ObjectWrap and v8::Persistent have been replaced with Nan implementations, key group 1 */ +groups.push([1, ['(', [ + 'NanDisposePersistent', + 'NanObjectWrapHandle'].join('|'), ')\\s*\\(\\s*([^\\s\\)]+)'].join('')]); + +/* Since NanPersistent there is no need for NanMakeWeakPersistent, key group 1 */ +groups.push([1, '(NanMakeWeakPersistent)\\s*\\(\\s*([^\\s,]+)\\s*,\\s*']); + +/* Many methods of v8::Object and others now return v8::MaybeLocal, key group 3 */ +groups.push([3, ['([\\s])([^\\s]+)->(', [ + 'GetEndColumn', + 'GetFunction', + 'GetLineNumber', + 'NewInstance', + 'GetPropertyNames', + 'GetOwnPropertyNames', + 'GetSourceLine', + 'GetStartColumn', + 'ObjectProtoToString', + 'ToArrayIndex', + 'ToDetailString', + 'CallAsConstructor', + 'CallAsFunction', + 'CloneElementAt', + 'Delete', + 'ForceSet', + 'Get', + 'GetPropertyAttributes', + 'GetRealNamedProperty', + 'GetRealNamedPropertyInPrototypeChain', + 'Has', + 'HasOwnProperty', + 'HasRealIndexedProperty', + 'HasRealNamedCallbackProperty', + 'HasRealNamedProperty', + 'Set', + 'SetAccessor', + 'SetIndexedPropertyHandler', + 'SetNamedPropertyHandler', + 'SetPrototype'].join('|'), ')\\('].join('')]); + +/* You should get an error if any of these fail anyways, + or handle the error better, it is indicated either way, key group 2 */ +groups.push([2, ['NanNew(<(?:v8\\:\\:)?(', ['Date', 'String', 'RegExp'].join('|'), ')>)(\\([^\\)]*\\))(?!\\.ToLocalChecked\\(\\))'].join('')]); + +/* v8::Value::Equals now returns a v8::Maybe, key group 3 */ +groups.push([3, '([\\s\\(\\)])([^\\s\\(\\)]+)->(Equals)\\(([^\\s\\)]+)']); + +/* NanPersistent makes this unnecessary, key group 1 */ +groups.push([1, '(NanAssignPersistent)(?:]+>)?\\(([^,]+),\\s*']); + +/* args has been renamed to info, key group 2 */ +groups.push([2, '(\\W)(args)(\\W)']) + +/* node::ObjectWrap was replaced with NanObjectWrap, key group 2 */ +groups.push([2, '(\\W)(?:node\\:\\:)?(ObjectWrap)(\\W)']); + +/* v8::Persistent was replaced with NanPersistent, key group 2 */ +groups.push([2, 
'(\\W)(?:v8\\:\\:)?(Persistent)(\\W)']); + +/* counts the number of capturing groups in a well-formed regular expression, + ignoring non-capturing groups and escaped parentheses */ +function groupcount(s) { + var positive = s.match(/\((?!\?)/g), + negative = s.match(/\\\(/g); + return (positive ? positive.length : 0) - (negative ? negative.length : 0); +} + +/* compute the absolute position of each key group in the joined master RegExp */ +for (i = 1, length = groups.length; i < length; i++) { + total += groupcount(groups[i - 1][1]); + groups[i][0] += total; +} + +/* create the master RegExp, whis is the union of all the groups' expressions */ +master = new RegExp(groups.map(function (a) { return a[1]; }).join('|'), 'gm'); + +/* replacement function for String.replace, receives 21 arguments */ +function replace() { + /* simple expressions */ + switch (arguments[groups[0][0]]) { + case '_NAN_': + return 'NAN_'; + case 'NODE_SET_METHOD': + return 'NanSetMethod'; + case 'NODE_SET_PROTOTYPE_METHOD': + return 'NanSetPrototypeMethod'; + case 'NanAsciiString': + return 'NanUtf8String'; + case 'NanEscapeScope': + return 'scope.Escape'; + case 'NanReturnNull': + return 'info.GetReturnValue().SetNull'; + case 'NanReturnValue': + return 'info.GetReturnValue().Set'; + case 'NanUcs2String': + return 'v8::String::Value'; + default: + } + + /* macros without arguments */ + switch (arguments[groups[1][0]]) { + case 'NanEscapableScope': + return 'NanEscapableScope scope' + case 'NanReturnUndefined': + return 'return'; + case 'NanScope': + return 'NanScope scope'; + default: + } + + /* TryCatch, emulate negative backref */ + if (arguments[groups[2][0]] === 'TryCatch') { + return arguments[groups[2][0] - 1] ? arguments[0] : 'NanTryCatch'; + } + + /* NanNew("foo") --> NanNew("foo").ToLocalChecked() */ + if (arguments[groups[3][0]] === 'NanNew') { + return [arguments[0], '.ToLocalChecked()'].join(''); + } + + /* insert warning for removed functions as comment on new line above */ + switch (arguments[groups[4][0]]) { + case 'GetIndexedPropertiesExternalArrayData': + case 'GetIndexedPropertiesExternalArrayDataLength': + case 'GetIndexedPropertiesExternalArrayDataType': + case 'GetIndexedPropertiesPixelData': + case 'GetIndexedPropertiesPixelDataLength': + case 'HasIndexedPropertiesInExternalArrayData': + case 'HasIndexedPropertiesInPixelData': + case 'SetIndexedPropertiesToExternalArrayData': + case 'SetIndexedPropertiesToPixelData': + return arguments[groups[4][0] - 1] ? 
arguments[0] : [warning1, arguments[0]].join(''); + default: + } + + /* remove unnecessary NanScope() */ + switch (arguments[groups[5][0]]) { + case 'NAN_GETTER': + case 'NAN_METHOD': + case 'NAN_SETTER': + case 'NAN_INDEX_DELETER': + case 'NAN_INDEX_ENUMERATOR': + case 'NAN_INDEX_GETTER': + case 'NAN_INDEX_QUERY': + case 'NAN_INDEX_SETTER': + case 'NAN_PROPERTY_DELETER': + case 'NAN_PROPERTY_ENUMERATOR': + case 'NAN_PROPERTY_GETTER': + case 'NAN_PROPERTY_QUERY': + case 'NAN_PROPERTY_SETTER': + return arguments[groups[5][0] - 1]; + default: + } + + /* Value converstion */ + switch (arguments[groups[6][0]]) { + case 'Boolean': + case 'Int32': + case 'Integer': + case 'Number': + case 'Object': + case 'String': + case 'Uint32': + return [arguments[groups[6][0] - 2], 'NanTo(', arguments[groups[6][0] - 1]].join(''); + default: + } + + /* other value conversion */ + switch (arguments[groups[7][0]]) { + case 'BooleanValue': + return [arguments[groups[7][0] - 2], 'NanTo(', arguments[groups[7][0] - 1]].join(''); + case 'Int32Value': + return [arguments[groups[7][0] - 2], 'NanTo(', arguments[groups[7][0] - 1]].join(''); + case 'IntegerValue': + return [arguments[groups[7][0] - 2], 'NanTo(', arguments[groups[7][0] - 1]].join(''); + case 'Uint32Value': + return [arguments[groups[7][0] - 2], 'NanTo(', arguments[groups[7][0] - 1]].join(''); + default: + } + + /* NAN_WEAK_CALLBACK */ + if (arguments[groups[8][0]] === 'NAN_WEAK_CALLBACK') { + return ['template\nvoid ', + arguments[groups[8][0] + 1], '(const NanWeakCallbackInfo &data)'].join(''); + } + + /* use methods on NAN classes instead */ + switch (arguments[groups[9][0]]) { + case 'NanDisposePersistent': + return [arguments[groups[9][0] + 1], '.Reset('].join(''); + case 'NanObjectWrapHandle': + return [arguments[groups[9][0] + 1], '->handle('].join(''); + default: + } + + /* use method on NanPersistent instead */ + if (arguments[groups[10][0]] === 'NanMakeWeakPersistent') { + return arguments[groups[10][0] + 1] + '.SetWeak('; + } + + /* These return Maybes, the upper ones take no arguments */ + switch (arguments[groups[11][0]]) { + case 'GetEndColumn': + case 'GetFunction': + case 'GetLineNumber': + case 'GetOwnPropertyNames': + case 'GetPropertyNames': + case 'GetSourceLine': + case 'GetStartColumn': + case 'NewInstance': + case 'ObjectProtoToString': + case 'ToArrayIndex': + case 'ToDetailString': + return [arguments[groups[11][0] - 2], 'Nan', arguments[groups[11][0]], '(', arguments[groups[11][0] - 1]].join(''); + case 'CallAsConstructor': + case 'CallAsFunction': + case 'CloneElementAt': + case 'Delete': + case 'ForceSet': + case 'Get': + case 'GetPropertyAttributes': + case 'GetRealNamedProperty': + case 'GetRealNamedPropertyInPrototypeChain': + case 'Has': + case 'HasOwnProperty': + case 'HasRealIndexedProperty': + case 'HasRealNamedCallbackProperty': + case 'HasRealNamedProperty': + case 'Set': + case 'SetAccessor': + case 'SetIndexedPropertyHandler': + case 'SetNamedPropertyHandler': + case 'SetPrototype': + return [arguments[groups[11][0] - 2], 'Nan', arguments[groups[11][0]], '(', arguments[groups[11][0] - 1], ', '].join(''); + default: + } + + /* Automatic ToLocalChecked(), take it or leave it */ + switch (arguments[groups[12][0]]) { + case 'Date': + case 'String': + case 'RegExp': + return ['NanNew', arguments[groups[12][0] - 1], arguments[groups[12][0] + 1], '.ToLocalChecked()'].join(''); + default: + } + + /* NanEquals is now required for uniformity */ + if (arguments[groups[13][0]] === 'Equals') { + return [arguments[groups[13][0] - 
1], 'NanEquals(', arguments[groups[13][0] - 1], ', ', arguments[groups[13][0] + 1]].join(''); + } + + /* use method on replacement class instead */ + if (arguments[groups[14][0]] === 'NanAssignPersistent') { + return [arguments[groups[14][0] + 1], '.Reset('].join(''); + } + + /* args --> info */ + if (arguments[groups[15][0]] === 'args') { + return [arguments[groups[15][0] - 1], 'info', arguments[groups[15][0] + 1]].join(''); + } + + /* ObjectWrap --> NanObjectWrap */ + if (arguments[groups[16][0]] === 'ObjectWrap') { + return [arguments[groups[16][0] - 1], 'NanObjectWrap', arguments[groups[16][0] + 1]].join(''); + } + + /* Persistent --> NanPersistent */ + if (arguments[groups[17][0]] === 'Persistent') { + return [arguments[groups[17][0] - 1], 'NanPersistent', arguments[groups[17][0] + 1]].join(''); + } + + /* This should not happen. A switch is probably missing a case if it does. */ + throw 'Unhandled match: ' + arguments[0]; +} + +/* reads a file, runs replacement and writes it back */ +function processFile(file) { + fs.readFile(file, {encoding: 'utf8'}, function (err, data) { + if (err) { + throw err; + } + + /* run replacement twice, might need more runs */ + fs.writeFile(file, data.replace(master, replace).replace(master, replace), function (err) { + if (err) { + throw err; + } + }); + }); +} + +/* process file names from command line and process the identified files */ +for (i = 2, length = process.argv.length; i < length; i++) { + glob(process.argv[i], function (err, matches) { + if (err) { + throw err; + } + matches.forEach(processFile); + }); +} diff --git a/node_modules/nan/tools/README.md b/node_modules/nan/tools/README.md new file mode 100644 index 00000000..7f07e4b8 --- /dev/null +++ b/node_modules/nan/tools/README.md @@ -0,0 +1,14 @@ +1to2 naively converts source code files from NAN 1 to NAN 2. There will be erroneous conversions, +false positives and missed opportunities. The input files are rewritten in place. Make sure that +you have backups. You will have to manually review the changes afterwards and do some touchups. + +```sh +$ tools/1to2.js + + Usage: 1to2 [options] + + Options: + + -h, --help output usage information + -V, --version output the version number +``` diff --git a/node_modules/nan/tools/package.json b/node_modules/nan/tools/package.json new file mode 100644 index 00000000..2dcdd789 --- /dev/null +++ b/node_modules/nan/tools/package.json @@ -0,0 +1,19 @@ +{ + "name": "1to2", + "version": "1.0.0", + "description": "NAN 1 -> 2 Migration Script", + "main": "1to2.js", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/nan.git" + }, + "contributors": [ + "Benjamin Byholm (https://github.com/kkoopa/)", + "Mathias Küsel (https://github.com/mathiask88/)" + ], + "dependencies": { + "glob": "~5.0.10", + "commander": "~2.8.1" + }, + "license": "MIT" +} diff --git a/node_modules/nanomatch/CHANGELOG.md b/node_modules/nanomatch/CHANGELOG.md new file mode 100644 index 00000000..8c3aead9 --- /dev/null +++ b/node_modules/nanomatch/CHANGELOG.md @@ -0,0 +1,57 @@ +## History + +### key + +Changelog entries are classified using the following labels _(from [keep-a-changelog][]_): + +- `added`: for new features +- `changed`: for changes in existing functionality +- `deprecated`: for once-stable features removed in upcoming releases +- `removed`: for deprecated features removed in this release +- `fixed`: for any bug fixes +- `bumped`: updated dependencies, only minor or higher will be listed. 
+ +### [1.1.0] - 2017-04-11 + +**Fixed** + +- adds support for unclosed quotes + +**Added** + +- adds support for `options.noglobstar` + +### [1.0.4] - 2017-04-06 + +Housekeeping updates. Adds documentation section about escaping, cleans up utils. + +### [1.0.3] - 2017-04-06 + +This release includes fixes for windows path edge cases and other improvements for stricter adherence to bash spec. + +**Fixed** + +- More windows path edge cases + +**Added** + +- Support for bash-like quoted strings for escaping sequences of characters, such as `foo/"**"/bar` where `**` should be matched literally and not evaluated as special characters. + +### [1.0.1] - 2016-12-12 + +**Added** + +- Support for windows path edge cases where backslashes are used in brackets or other unusual combinations. + +### [1.0.0] - 2016-12-12 + +Stable release. + +### [0.1.0] - 2016-10-08 + +First release. + +[Unreleased]: https://github.com/jonschlinkert/nanomatch/compare/0.1.0...HEAD +[0.2.0]: https://github.com/jonschlinkert/nanomatch/compare/0.1.0...0.2.0 + +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog diff --git a/node_modules/nanomatch/LICENSE b/node_modules/nanomatch/LICENSE new file mode 100644 index 00000000..7c9987bc --- /dev/null +++ b/node_modules/nanomatch/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016-2018, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/nanomatch/README.md b/node_modules/nanomatch/README.md new file mode 100644 index 00000000..bdd35a94 --- /dev/null +++ b/node_modules/nanomatch/README.md @@ -0,0 +1,1148 @@ +# nanomatch [![NPM version](https://img.shields.io/npm/v/nanomatch.svg?style=flat)](https://www.npmjs.com/package/nanomatch) [![NPM monthly downloads](https://img.shields.io/npm/dm/nanomatch.svg?style=flat)](https://npmjs.org/package/nanomatch) [![NPM total downloads](https://img.shields.io/npm/dt/nanomatch.svg?style=flat)](https://npmjs.org/package/nanomatch) [![Linux Build Status](https://img.shields.io/travis/micromatch/nanomatch.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/nanomatch) [![Windows Build Status](https://img.shields.io/appveyor/ci/micromatch/nanomatch.svg?style=flat&label=AppVeyor)](https://ci.appveyor.com/project/micromatch/nanomatch) + +> Fast, minimal glob matcher for node.js. 
Similar to micromatch, minimatch and multimatch, but with complete Bash 4.3 wildcard support only (no support for extglobs, posix brackets or braces) + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and starring the project to show your :heart: and support. + +## Table of Contents + +&#10;

+Details + +- [Install](#install) +- [What is nanomatch?](#what-is-nanomatch) +- [Getting started](#getting-started) + * [Installing nanomatch](#installing-nanomatch) + * [Usage](#usage) +- [Documentation](#documentation) + * [Escaping](#escaping) +- [API](#api) +- [Options](#options) + * [options.basename](#optionsbasename) + * [options.bash](#optionsbash) + * [options.cache](#optionscache) + * [options.dot](#optionsdot) + * [options.failglob](#optionsfailglob) + * [options.ignore](#optionsignore) + * [options.matchBase](#optionsmatchbase) + * [options.nocase](#optionsnocase) + * [options.nodupes](#optionsnodupes) + * [options.noglobstar](#optionsnoglobstar) + * [options.nonegate](#optionsnonegate) + * [options.nonull](#optionsnonull) + * [options.nullglob](#optionsnullglob) + * [options.slash](#optionsslash) + * [options.star](#optionsstar) + * [options.snapdragon](#optionssnapdragon) + * [options.sourcemap](#optionssourcemap) + * [options.unescape](#optionsunescape) + * [options.unixify](#optionsunixify) +- [Features](#features) +- [Bash expansion libs](#bash-expansion-libs) +- [Benchmarks](#benchmarks) + * [Running benchmarks](#running-benchmarks) + * [Nanomatch vs. Minimatch vs. Multimatch](#nanomatch-vs-minimatch-vs-multimatch) +- [About](#about) + +
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save nanomatch +``` + +
+Release history + +## History + +### key + +Changelog entries are classified using the following labels _(from [keep-a-changelog](https://github.com/olivierlacan/keep-a-changelog)_): + +* `added`: for new features +* `changed`: for changes in existing functionality +* `deprecated`: for once-stable features removed in upcoming releases +* `removed`: for deprecated features removed in this release +* `fixed`: for any bug fixes +* `bumped`: updated dependencies, only minor or higher will be listed. + +### [1.1.0](https://github.com/micromatch/nanomatch/compare/1.0.4...1.1.0) - 2017-04-11 + +**Fixed** + +* adds support for unclosed quotes + +**Added** + +* adds support for `options.noglobstar` + +### [1.0.4](https://github.com/micromatch/nanomatch/compare/1.0.3...1.0.4) - 2017-04-06 + +Housekeeping updates. Adds documentation section about escaping, cleans up utils. + +### [1.0.3](https://github.com/micromatch/nanomatch/compare/1.0.1...1.0.3) - 2017-04-06 + +This release includes fixes for windows path edge cases and other improvements for stricter adherence to bash spec. + +**Fixed** + +* More windows path edge cases + +**Added** + +* Support for bash-like quoted strings for escaping sequences of characters, such as `foo/"**"/bar` where `**` should be matched literally and not evaluated as special characters. + +### [1.0.1](https://github.com/micromatch/nanomatch/compare/1.0.0...1.0.1) - 2016-12-12 + +**Added** + +* Support for windows path edge cases where backslashes are used in brackets or other unusual combinations. + +### [1.0.0](https://github.com/micromatch/nanomatch/compare/0.1.0...1.0.0) - 2016-12-12 + +Stable release. + +### [0.1.0] - 2016-10-08 + +First release. + +
+ +## What is nanomatch? + +Nanomatch is a fast and accurate glob matcher with full support for standard Bash glob features, including the following "metacharacters": `*`, `**`, `?` and `[...]`. + +**Learn more** + +* [Getting started](#getting-started): learn how to install and begin using nanomatch +* [Features](#features): jump to info about supported patterns, and a glob matching reference +* [API documentation](#api): jump to available options and methods +* [Unit tests](test): visit the unit tests. There is no better way to learn a code library than spending time in the unit tests. Nanomatch has 36,000 unit tests - go become a glob matching ninja! + +&#10;
+How is this different? + +**Speed and accuracy** + +Nanomatch uses [snapdragon](https://github.com/jonschlinkert/snapdragon) for parsing and compiling globs, which results in: + +* Granular control over the entire conversion process in a way that is easy to understand, reason about, and customize. +* Faster matching, from a combination of optimized glob patterns and (optional) caching. +* Much greater accuracy than minimatch. In fact, nanomatch passes _all of the spec tests_ from bash, including some that bash still fails. However, since there is no real specification for globs, if you encounter a pattern that yields unexpected match results [after researching previous issues](../../issues), [please let us know](../../issues/new). + +**Basic globbing only** + +Nanomatch supports [basic globbing only](#features), which is limited to `*`, `**`, `?` and regex-like brackets. + +If you need support for the other [bash "expansion" types](#bash-expansion-libs) (in addition to the wildcard matching provided by nanomatch), consider using [micromatch](https://github.com/micromatch/micromatch) instead. _(micromatch >=3.0.0 uses the nanomatch parser and compiler for basic glob matching)_ + +
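Here is a quick, illustrative sketch of the `?` and `[...]` wildcards mentioned above. The file names are invented for the example, and the commented results assume the standard Bash-style semantics nanomatch implements:

```js
var nm = require('nanomatch');

// "?" matches exactly one character
console.log(nm(['a.js', 'ab.js', 'a/b.js'], '?.js'));
//=> ['a.js']

// "[...]" matches any single character listed inside the brackets
console.log(nm(['foo.md', 'bar.md', 'baz.md'], 'ba[rz].md'));
//=> ['bar.md', 'baz.md']
```

See the [Usage](#usage) and [API](#api) sections below for the full set of methods and options.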
+ +## Getting started + +### Installing nanomatch + +**Install with [yarn](https://yarnpkg.com/)** + +```sh +$ yarn add nanomatch +``` + +**Install with [npm](https://npmjs.com)** + +```sh +$ npm install nanomatch +``` + +### Usage + +Add nanomatch to your project using node's `require()` system: + +```js +var nanomatch = require('nanomatch'); + +// the main export is a function that takes an array of strings to match +// and a string or array of patterns to use for matching +nanomatch(list, patterns[, options]); +``` + +**Params** + +* `list` **{String|Array}**: List of strings to perform matches against. This is often a list of file paths. +* `patterns` **{String|Array}**: One or more [glob paterns](#features) to use for matching. +* `options` **{Object}**: Any [supported options](#options) may be passed + +**Examples** + +```js +var nm = require('nanomatch'); +console.log(nm(['a', 'b/b', 'c/c/c'], '*')); +//=> ['a'] + +console.log(nm(['a', 'b/b', 'c/c/c'], '*/*')); +//=> ['b/b'] + +console.log(nm(['a', 'b/b', 'c/c/c'], '**')); +//=> ['a', 'b/b', 'c/c/c'] +``` + +See the [API documentation](#api) for available methods and [options](https://github.com/einaros/options.js). + +## Documentation + +### Escaping + +_Backslashes and quotes_ can be used to escape characters, forcing nanomatch to regard those characters as a literal characters. + +**Backslashes** + +Use backslashes to escape single characters. For example, the following pattern would match `foo/*/bar` exactly: + +```js +'foo/\*/bar' +``` + +The following pattern would match `foo/` followed by a literal `*`, followed by zero or more of any characters besides `/`, followed by `/bar`. + +```js +'foo/\**/bar' +``` + +**Quoted strings** + +Use single or double quotes to escape sequences of characters. For example, the following patterns would match `foo/**/bar` exactly: + +```js +'foo/"**"/bar' +'foo/\'**\'/bar' +"foo/'**'/bar" +``` + +**Matching literal quotes** + +If you need to match quotes literally, you can escape them as well. For example, the following will match `foo/"*"/bar`, `foo/"a"/bar`, `foo/"b"/bar`, or `foo/"c"/bar`: + +```js +'foo/\\"*\\"/bar' +``` + +And the following will match `foo/'*'/bar`, `foo/'a'/bar`, `foo/'b'/bar`, or `foo/'c'/bar`: + +```js +'foo/\\\'*\\\'/bar' +``` + +## API + +### [nanomatch](index.js#L40) + +The main function takes a list of strings and one or more glob patterns to use for matching. + +**Params** + +* `list` **{Array}**: A list of strings to match +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Array}**: Returns an array of matches + +**Example** + +```js +var nm = require('nanomatch'); +nm(list, patterns[, options]); + +console.log(nm(['a.js', 'a.txt'], ['*.js'])); +//=> [ 'a.js' ] +``` + +### [.match](index.js#L106) + +Similar to the main function, but `pattern` must be a string. + +**Params** + +* `list` **{Array}**: Array of strings to match +* `pattern` **{String}**: Glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Array}**: Returns an array of matches + +**Example** + +```js +var nm = require('nanomatch'); +nm.match(list, pattern[, options]); + +console.log(nm.match(['a.a', 'a.aa', 'a.b', 'a.c'], '*.a')); +//=> ['a.a', 'a.aa'] +``` + +### [.isMatch](index.js#L167) + +Returns true if the specified `string` matches the given glob `pattern`. 
+ +**Params** + +* `string` **{String}**: String to match +* `pattern` **{String}**: Glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if the string matches the glob pattern. + +**Example** + +```js +var nm = require('nanomatch'); +nm.isMatch(string, pattern[, options]); + +console.log(nm.isMatch('a.a', '*.a')); +//=> true +console.log(nm.isMatch('a.b', '*.a')); +//=> false +``` + +### [.some](index.js#L205) + +Returns true if some of the elements in the given `list` match any of the given glob `patterns`. + +**Params** + +* `list` **{String|Array}**: The string or array of strings to test. Returns as soon as the first match is found. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +var nm = require('nanomatch'); +nm.some(list, patterns[, options]); + +console.log(nm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); +// true +console.log(nm.some(['foo.js'], ['*.js', '!foo.js'])); +// false +``` + +### [.every](index.js#L243) + +Returns true if every element in the given `list` matches at least one of the given glob `patterns`. + +**Params** + +* `list` **{String|Array}**: The string or array of strings to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +var nm = require('nanomatch'); +nm.every(list, patterns[, options]); + +console.log(nm.every('foo.js', ['foo.js'])); +// true +console.log(nm.every(['foo.js', 'bar.js'], ['*.js'])); +// true +console.log(nm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); +// false +console.log(nm.every(['foo.js'], ['*.js', '!foo.js'])); +// false +``` + +### [.any](index.js#L277) + +Returns true if **any** of the given glob `patterns` match the specified `string`. + +**Params** + +* `str` **{String|Array}**: The string to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +var nm = require('nanomatch'); +nm.any(string, patterns[, options]); + +console.log(nm.any('a.a', ['b.*', '*.a'])); +//=> true +console.log(nm.any('a.a', 'b.*')); +//=> false +``` + +### [.all](index.js#L325) + +Returns true if **all** of the given `patterns` match the specified string. + +**Params** + +* `str` **{String|Array}**: The string to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. 
+* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +var nm = require('nanomatch'); +nm.all(string, patterns[, options]); + +console.log(nm.all('foo.js', ['foo.js'])); +// true + +console.log(nm.all('foo.js', ['*.js', '!foo.js'])); +// false + +console.log(nm.all('foo.js', ['*.js', 'foo.js'])); +// true + +console.log(nm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); +// true +``` + +### [.not](index.js#L359) + +Returns a list of strings that _**do not match any**_ of the given `patterns`. + +**Params** + +* `list` **{Array}**: Array of strings to match. +* `patterns` **{String|Array}**: One or more glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Array}**: Returns an array of strings that **do not match** the given patterns. + +**Example** + +```js +var nm = require('nanomatch'); +nm.not(list, patterns[, options]); + +console.log(nm.not(['a.a', 'b.b', 'c.c'], '*.a')); +//=> ['b.b', 'c.c'] +``` + +### [.contains](index.js#L394) + +Returns true if the given `string` contains the given pattern. Similar to [.isMatch](#isMatch) but the pattern can match any part of the string. + +**Params** + +* `str` **{String}**: The string to match. +* `patterns` **{String|Array}**: Glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if the patter matches any part of `str`. + +**Example** + +```js +var nm = require('nanomatch'); +nm.contains(string, pattern[, options]); + +console.log(nm.contains('aa/bb/cc', '*b')); +//=> true +console.log(nm.contains('aa/bb/cc', '*d')); +//=> false +``` + +### [.matchKeys](index.js#L450) + +Filter the keys of the given object with the given `glob` pattern and `options`. Does not attempt to match nested keys. If you need this feature, use [glob-object](https://github.com/jonschlinkert/glob-object) instead. + +**Params** + +* `object` **{Object}**: The object with keys to filter. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Object}**: Returns an object with only keys that match the given patterns. + +**Example** + +```js +var nm = require('nanomatch'); +nm.matchKeys(object, patterns[, options]); + +var obj = { aa: 'a', ab: 'b', ac: 'c' }; +console.log(nm.matchKeys(obj, '*b')); +//=> { ab: 'b' } +``` + +### [.matcher](index.js#L479) + +Returns a memoized matcher function from the given glob `pattern` and `options`. The returned function takes a string to match as its only argument and returns true if the string is a match. + +**Params** + +* `pattern` **{String}**: Glob pattern +* `options` **{Object}**: See available [options](#options) for changing how matches are performed. +* `returns` **{Function}**: Returns a matcher function. + +**Example** + +```js +var nm = require('nanomatch'); +nm.matcher(pattern[, options]); + +var isMatch = nm.matcher('*.!(*a)'); +console.log(isMatch('a.a')); +//=> false +console.log(isMatch('a.b')); +//=> true +``` + +### [.capture](index.js#L560) + +Returns an array of matches captured by `pattern` in `string, or`null` if the pattern did not match. + +**Params** + +* `pattern` **{String}**: Glob pattern to use for matching. 
+* `string` **{String}**: String to match +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns an array of captures if the string matches the glob pattern, otherwise `null`. + +**Example** + +```js +var nm = require('nanomatch'); +nm.capture(pattern, string[, options]); + +console.log(nm.capture('test/*.js', 'test/foo.js')); +//=> ['foo'] +console.log(nm.capture('test/*.js', 'foo/bar.css')); +//=> null +``` + +### [.makeRe](index.js#L595) + +Create a regular expression from the given glob `pattern`. + +**Params** + +* `pattern` **{String}**: A glob pattern to convert to regex. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed. +* `returns` **{RegExp}**: Returns a regex created from the given pattern. + +**Example** + +```js +var nm = require('nanomatch'); +nm.makeRe(pattern[, options]); + +console.log(nm.makeRe('*.js')); +//=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ +``` + +### [.create](index.js#L658) + +Parses the given glob `pattern` and returns an object with the compiled `output` and optional source `map`. + +**Params** + +* `pattern` **{String}**: Glob pattern to parse and compile. +* `options` **{Object}**: Any [options](#options) to change how parsing and compiling is performed. +* `returns` **{Object}**: Returns an object with the parsed AST, compiled string and optional source map. + +**Example** + +```js +var nm = require('nanomatch'); +nm.create(pattern[, options]); + +console.log(nm.create('abc/*.js')); +// { options: { source: 'string', sourcemap: true }, +// state: {}, +// compilers: +// { ... }, +// output: '(\\.[\\\\\\/])?abc\\/(?!\\.)(?=.)[^\\/]*?\\.js', +// ast: +// { type: 'root', +// errors: [], +// nodes: +// [ ... ], +// dot: false, +// input: 'abc/*.js' }, +// parsingErrors: [], +// map: +// { version: 3, +// sources: [ 'string' ], +// names: [], +// mappings: 'AAAA,GAAG,EAAC,kBAAC,EAAC,EAAE', +// sourcesContent: [ 'abc/*.js' ] }, +// position: { line: 1, column: 28 }, +// content: {}, +// files: {}, +// idx: 6 } +``` + +### [.parse](index.js#L697) + +Parse the given `str` with the given `options`. + +**Params** + +* `str` **{String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an AST + +**Example** + +```js +var nm = require('nanomatch'); +nm.parse(pattern[, options]); + +var ast = nm.parse('a/{b,c}/d'); +console.log(ast); +// { type: 'root', +// errors: [], +// input: 'a/{b,c}/d', +// nodes: +// [ { type: 'bos', val: '' }, +// { type: 'text', val: 'a/' }, +// { type: 'brace', +// nodes: +// [ { type: 'brace.open', val: '{' }, +// { type: 'text', val: 'b,c' }, +// { type: 'brace.close', val: '}' } ] }, +// { type: 'text', val: '/d' }, +// { type: 'eos', val: '' } ] } +``` + +### [.compile](index.js#L745) + +Compile the given `ast` or string with the given `options`. + +**Params** + +* `ast` **{Object|String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an object that has an `output` property with the compiled string. + +**Example** + +```js +var nm = require('nanomatch'); +nm.compile(ast[, options]); + +var ast = nm.parse('a/{b,c}/d'); +console.log(nm.compile(ast)); +// { options: { source: 'string' }, +// state: {}, +// compilers: +// { eos: [Function], +// noop: [Function], +// bos: [Function], +// brace: [Function], +// 'brace.open': [Function], +// text: [Function], +// 'brace.close': [Function] }, +// output: [ 'a/(b|c)/d' ], +// ast: +// { ... 
}, +// parsingErrors: [] } +``` + +### [.clearCache](index.js#L768) + +Clear the regex cache. + +**Example** + +```js +nm.clearCache(); +``` + +## Options + +
+basename + +### options.basename + +Allow glob patterns without slashes to match a file path based on its basename. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `matchBase`. + +Type: `boolean` + +Default: `false` + +**Example** + +```js +nm(['a/b.js', 'a/c.md'], '*.js'); +//=> [] + +nm(['a/b.js', 'a/c.md'], '*.js', {matchBase: true}); +//=> ['a/b.js'] +``` + +
+ +
+bash
+
+### options.bash
+
+Enabled by default, this option enforces bash-like behavior with stars immediately following a bracket expression. Bash bracket expressions are similar to regex character classes, but unlike regex, a star following a bracket expression **does not repeat the bracketed characters**. Instead, the star is treated the same as any other star.
+
+Type: `boolean`
+
+Default: `true`
+
+**Example**
+
+```js
+var files = ['abc', 'ajz'];
+console.log(nm(files, '[a-c]*'));
+//=> ['abc', 'ajz']
+
+console.log(nm(files, '[a-c]*', {bash: false}));
+```
+
+ +
+cache + +### options.cache + +Disable regex and function memoization. + +Type: `boolean` + +Default: `undefined` + +
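+**Example**
+
+A minimal sketch (not taken from the library's docs): passing `cache: false` skips memoization, so results are the same but nothing is stored for reuse:
+
+```js
+var nm = require('nanomatch');
+
+// illustrative output; `cache: false` only disables memoization
+console.log(nm(['a.js', 'a.md'], '*.js', {cache: false}));
+//=> ['a.js']
+```
+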
+ +
+dot + +### options.dot + +Match dotfiles. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `dot`. + +Type: `boolean` + +Default: `false` + +
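+**Example**
+
+A rough sketch of the behavior described above; the expected output is inferred from the description rather than taken from the test suite:
+
+```js
+var nm = require('nanomatch');
+
+console.log(nm(['.editorconfig', 'a.js'], '*'));
+//=> ['a.js']
+
+console.log(nm(['.editorconfig', 'a.js'], '*', {dot: true}));
+//=> ['.editorconfig', 'a.js']
+```
+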
+ +
+failglob + +### options.failglob + +Similar to the `--failglob` behavior in Bash, throws an error when no matches are found. + +Type: `boolean` + +Default: `undefined` + +
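+**Example**
+
+A small sketch of how a caller might handle this; the error message matches the one thrown in `index.js` when `failglob` is enabled:
+
+```js
+var nm = require('nanomatch');
+
+try {
+  nm(['a.md'], '*.js', {failglob: true});
+} catch (err) {
+  console.log(err.message);
+  //=> no matches found for "*.js"
+}
+```
+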
+ +
+ignore + +### options.ignore + +String or array of glob patterns to match files to ignore. + +Type: `String|Array` + +Default: `undefined` + +
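+**Example**
+
+For illustration: matches are computed first, then anything matching the `ignore` pattern(s) is removed (expected output inferred from the description):
+
+```js
+var nm = require('nanomatch');
+
+console.log(nm(['a.js', 'a.md', 'b.js'], '*', {ignore: '*.md'}));
+//=> ['a.js', 'b.js']
+```
+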
+ +
+matchBase + +### options.matchBase + +Alias for [options.basename](#options-basename). + +
+ +
+nocase + +### options.nocase + +Use a case-insensitive regex for matching files. Same behavior as [minimatch](https://github.com/isaacs/minimatch). + +Type: `boolean` + +Default: `undefined` + +
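+**Example**
+
+A minimal sketch of the expected case-insensitive behavior (output inferred from the description):
+
+```js
+var nm = require('nanomatch');
+
+console.log(nm.isMatch('README.md', '*.MD'));
+//=> false
+
+console.log(nm.isMatch('README.md', '*.MD', {nocase: true}));
+//=> true
+```
+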
+ +
+nodupes
+
+### options.nodupes
+
+Remove duplicate elements from the result array.
+
+Type: `boolean`
+
+Default: `true`
+
+**Example**
+
+Example of the default deduplication, and of disabling it with `nodupes: false`:
+
+```js
+nm.match(['a/b/c', 'a/b/c'], '**');
+//=> ['a/b/c']
+
+nm.match(['a/b/c', 'a/b/c'], '**', {nodupes: false});
+//=> ['a/b/c', 'a/b/c']
+```
+
+ +
+noglobstar
+
+### options.noglobstar
+
+Disable matching with globstars (`**`).
+
+Type: `boolean`
+
+Default: `undefined`
+
+**Example**
+
+```js
+nm(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**');
+//=> ['a/b', 'a/b/c', 'a/b/c/d']
+
+nm(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**', {noglobstar: true});
+//=> ['a/b']
+```
+
+ +
+nonegate + +### options.nonegate + +Disallow negation (`!`) patterns, and treat leading `!` as a literal character to match. + +Type: `boolean` + +Default: `undefined` + +
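+**Example**
+
+A sketch of the expected difference; the outputs are inferred from the description, so treat them as illustrative:
+
+```js
+var nm = require('nanomatch');
+
+// default: a leading "!" negates the pattern
+console.log(nm(['a.js', 'a.md'], '!*.js'));
+//=> ['a.md']
+
+// with `nonegate`, the leading "!" is treated as a literal character
+console.log(nm(['a.js', '!a.js'], '!*.js', {nonegate: true}));
+//=> ['!a.js']
+```
+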
+ +
+nonull + +### options.nonull + +Alias for [options.nullglob](#options-nullglob). + +
+ +
+nullglob + +### options.nullglob + +If `true`, when no matches are found the actual (arrayified) glob pattern is returned instead of an empty array. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `nonull`. + +Type: `boolean` + +Default: `undefined` + +
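+**Example**
+
+A minimal sketch based on the description above:
+
+```js
+var nm = require('nanomatch');
+
+console.log(nm(['a.md'], '*.js'));
+//=> []
+
+console.log(nm(['a.md'], '*.js', {nullglob: true}));
+//=> ['*.js']
+```
+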
+ +
+slash + +### options.slash + +Customize the slash character(s) to use for matching. + +Type: `string|function` + +Default: `[/\\]` (forward slash and backslash) + +
+ +
+star + +### options.star + +Customize the star character(s) to use for matching. It's not recommended that you modify this unless you have advanced knowledge of the compiler and matching rules. + +Type: `string|function` + +Default: `[^/\\]*?` + +
+ +
+snapdragon + +### options.snapdragon + +Pass your own instance of [snapdragon](https://github.com/jonschlinkert/snapdragon) to customize parsers or compilers. + +Type: `object` + +Default: `undefined` + +
+ +
+sourcemap
+
+### options.sourcemap
+
+Generate a source map by enabling the `sourcemap` option with the `.parse`, `.compile`, or `.create` methods.
+
+**Examples**
+
+```js
+var nm = require('nanomatch');
+
+var res = nm.create('abc/*.js', {sourcemap: true});
+console.log(res.map);
+// { version: 3,
+//   sources: [ 'string' ],
+//   names: [],
+//   mappings: 'AAAA,GAAG,EAAC,iBAAC,EAAC,EAAE',
+//   sourcesContent: [ 'abc/*.js' ] }
+
+var ast = nm.parse('abc/**/*.js');
+var res = nm.compile(ast, {sourcemap: true});
+console.log(res.map);
+// { version: 3,
+//   sources: [ 'string' ],
+//   names: [],
+//   mappings: 'AAAA,GAAG,EAAC,2BAAE,EAAC,iBAAC,EAAC,EAAE',
+//   sourcesContent: [ 'abc/**/*.js' ] }
+```
+
+ +
+unescape + +### options.unescape + +Remove backslashes from returned matches. + +Type: `boolean` + +Default: `undefined` + +**Example** + +In this example we want to match a literal `*`: + +```js +nm.match(['abc', 'a\\*c'], 'a\\*c'); +//=> ['a\\*c'] + +nm.match(['abc', 'a\\*c'], 'a\\*c', {unescape: true}); +//=> ['a*c'] +``` + +
+ +
+unixify
+
+### options.unixify
+
+Convert path separators on returned files to posix/unix-style forward slashes.
+
+Type: `boolean`
+
+Default: `true`
+
+**Example**
+
+```js
+nm.match(['a\\b\\c'], 'a/**');
+//=> ['a/b/c']
+
+nm.match(['a\\b\\c'], 'a/**', {unixify: false});
+//=> ['a\\b\\c']
+```
+
+
+## Features
+
+Nanomatch has full support for standard Bash glob features, including the following "metacharacters": `*`, `**`, `?` and `[...]`.
+
+Here are some examples of how they work:
+
+| **Pattern** | **Description** |
+| --- | --- |
+| `*` | Matches any string except for `/`, leading `.`, or `/.` inside a path |
+| `**` | Matches any string including `/`, but not a leading `.` or `/.` inside a path. More than two stars (e.g. `***`) are treated the same as one star, and `**` loses its special meaning when it's not the only thing in a path segment, per Bash specifications |
+| `foo*` | Matches any string beginning with `foo` |
+| `*bar*` | Matches any string containing `bar` (beginning, middle or end) |
+| `*.min.js` | Matches any string ending with `.min.js` |
+| `[abc]*.js` | Matches any string beginning with `a`, `b`, or `c` and ending with `.js` |
+| `abc?` | Matches `abcd` or `abcz` but not `abcde` |
+
+The exceptions noted for `*` apply to all patterns that contain a `*`.
+
+**Not supported**
+
+The following extended-globbing features are not supported:
+
+* [brace expansion](https://github.com/jonschlinkert/braces) (e.g. `{a,b,c}`)
+* [extglobs](https://github.com/jonschlinkert/extglob) (e.g. `@(a|!(c|d))`)
+* [POSIX brackets](https://github.com/jonschlinkert/expand-brackets) (e.g. `[[:alpha:][:digit:]]`)
+
+If you need any of these features, consider using [micromatch](https://github.com/micromatch/micromatch) instead.
+
+## Bash expansion libs
+
+Nanomatch is part of a suite of libraries aimed at bringing the power and expressiveness of [Bash's](https://www.gnu.org/software/bash/) matching and expansion capabilities to JavaScript, _and - as you can see by the [benchmarks](#benchmarks) - without sacrificing speed_.
+
+| **Related library** | **Matching Type** | **Example** | **Description** |
+| --- | --- | --- | --- |
+| `nanomatch` (you are here) | Wildcards | `*` | [Filename expansion](https://www.gnu.org/software/bash/manual/html_node/Filename-Expansion.html#Filename-Expansion), also referred to as globbing and pathname expansion, allows the use of [wildcards](#features) for matching. |
+| [expand-tilde](https://github.com/jonschlinkert/expand-tilde) | Tildes | `~` | [Tilde expansion](https://www.gnu.org/software/bash/manual/html_node/Tilde-Expansion.html#Tilde-Expansion) converts the leading tilde in a file path to the user home directory. |
+| [braces](https://github.com/jonschlinkert/braces) | Braces | `{a,b,c}` | [Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html) |
+| [expand-brackets](https://github.com/jonschlinkert/expand-brackets) | Brackets | `[[:alpha:]]` | [POSIX character classes](https://www.gnu.org/software/grep/manual/html_node/Character-Classes-and-Bracket-Expressions.html) (also referred to as POSIX brackets) |
+| [extglob](https://github.com/jonschlinkert/extglob) | Parens | `!(a\|b)` | [Extglobs](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html#Pattern-Matching) |
+| [micromatch](https://github.com/micromatch/micromatch) | All | all | Micromatch is built on top of the other libraries. |
+
+There are many resources available on the web if you want to dive deeper into how these features work in Bash.
+
+## Benchmarks
+
+### Running benchmarks
+
+Install dev dependencies:
+
+```bash
+npm i -d && node benchmark
+```
+
+### Nanomatch vs. Minimatch vs.
Multimatch + +```bash +# globstar-basic (182 bytes) + minimatch x 69,512 ops/sec ±1.92% (88 runs sampled) + multimatch x 63,376 ops/sec ±1.41% (89 runs sampled) + nanomatch x 432,451 ops/sec ±0.92% (88 runs sampled) + + fastest is nanomatch (by 651% avg) + +# large-list-globstar (485686 bytes) + minimatch x 34.02 ops/sec ±1.42% (59 runs sampled) + multimatch x 33.58 ops/sec ±1.97% (58 runs sampled) + nanomatch x 483 ops/sec ±1.06% (86 runs sampled) + + fastest is nanomatch (by 1429% avg) + +# long-list-globstar (194085 bytes) + minimatch x 383 ops/sec ±0.74% (90 runs sampled) + multimatch x 378 ops/sec ±0.59% (89 runs sampled) + nanomatch x 990 ops/sec ±1.14% (85 runs sampled) + + fastest is nanomatch (by 260% avg) + +# negation-basic (132 bytes) + minimatch x 242,145 ops/sec ±1.17% (89 runs sampled) + multimatch x 76,403 ops/sec ±0.78% (92 runs sampled) + nanomatch x 537,253 ops/sec ±1.44% (86 runs sampled) + + fastest is nanomatch (by 337% avg) + +# not-glob-basic (93 bytes) + minimatch x 252,402 ops/sec ±1.33% (89 runs sampled) + multimatch x 209,954 ops/sec ±1.30% (90 runs sampled) + nanomatch x 1,716,468 ops/sec ±1.13% (86 runs sampled) + + fastest is nanomatch (by 742% avg) + +# star-basic (93 bytes) + minimatch x 182,780 ops/sec ±1.41% (91 runs sampled) + multimatch x 153,210 ops/sec ±0.72% (89 runs sampled) + nanomatch x 599,621 ops/sec ±1.22% (90 runs sampled) + + fastest is nanomatch (by 357% avg) + +``` + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards. + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [extglob](https://www.npmjs.com/package/extglob): Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob… [more](https://github.com/micromatch/extglob) | [homepage](https://github.com/micromatch/extglob "Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob patterns.") +* [is-extglob](https://www.npmjs.com/package/is-extglob): Returns true if a string has an extglob. | [homepage](https://github.com/jonschlinkert/is-extglob "Returns true if a string has an extglob.") +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/micromatch/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 164 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [devongovett](https://github.com/devongovett) | + +### Author + +**Jon Schlinkert** + +* [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on February 18, 2018._ \ No newline at end of file diff --git a/node_modules/nanomatch/index.js b/node_modules/nanomatch/index.js new file mode 100644 index 00000000..dddc6128 --- /dev/null +++ b/node_modules/nanomatch/index.js @@ -0,0 +1,838 @@ +'use strict'; + +/** + * Module dependencies + */ + +var util = require('util'); +var toRegex = require('to-regex'); +var extend = require('extend-shallow'); + +/** + * Local dependencies + */ + +var compilers = require('./lib/compilers'); +var parsers = require('./lib/parsers'); +var cache = require('./lib/cache'); +var utils = require('./lib/utils'); +var MAX_LENGTH = 1024 * 64; + +/** + * The main function takes a list of strings and one or more + * glob patterns to use for matching. + * + * ```js + * var nm = require('nanomatch'); + * nm(list, patterns[, options]); + * + * console.log(nm(['a.js', 'a.txt'], ['*.js'])); + * //=> [ 'a.js' ] + * ``` + * @param {Array} `list` A list of strings to match + * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array} Returns an array of matches + * @summary false + * @api public + */ + +function nanomatch(list, patterns, options) { + patterns = utils.arrayify(patterns); + list = utils.arrayify(list); + + var len = patterns.length; + if (list.length === 0 || len === 0) { + return []; + } + + if (len === 1) { + return nanomatch.match(list, patterns[0], options); + } + + var negated = false; + var omit = []; + var keep = []; + var idx = -1; + + while (++idx < len) { + var pattern = patterns[idx]; + + if (typeof pattern === 'string' && pattern.charCodeAt(0) === 33 /* ! */) { + omit.push.apply(omit, nanomatch.match(list, pattern.slice(1), options)); + negated = true; + } else { + keep.push.apply(keep, nanomatch.match(list, pattern, options)); + } + } + + // minimatch.match parity + if (negated && keep.length === 0) { + if (options && options.unixify === false) { + keep = list.slice(); + } else { + var unixify = utils.unixify(options); + for (var i = 0; i < list.length; i++) { + keep.push(unixify(list[i])); + } + } + } + + var matches = utils.diff(keep, omit); + if (!options || options.nodupes !== false) { + return utils.unique(matches); + } + + return matches; +} + +/** + * Similar to the main function, but `pattern` must be a string. + * + * ```js + * var nm = require('nanomatch'); + * nm.match(list, pattern[, options]); + * + * console.log(nm.match(['a.a', 'a.aa', 'a.b', 'a.c'], '*.a')); + * //=> ['a.a', 'a.aa'] + * ``` + * @param {Array} `list` Array of strings to match + * @param {String} `pattern` Glob pattern to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array} Returns an array of matches + * @api public + */ + +nanomatch.match = function(list, pattern, options) { + if (Array.isArray(pattern)) { + throw new TypeError('expected pattern to be a string'); + } + + var unixify = utils.unixify(options); + var isMatch = memoize('match', pattern, options, nanomatch.matcher); + var matches = []; + + list = utils.arrayify(list); + var len = list.length; + var idx = -1; + + while (++idx < len) { + var ele = list[idx]; + if (ele === pattern || isMatch(ele)) { + matches.push(utils.value(ele, unixify, options)); + } + } + + // if no options were passed, uniquify results and return + if (typeof options === 'undefined') { + return utils.unique(matches); + } + + if (matches.length === 0) { + if (options.failglob === true) { + throw new Error('no matches found for "' + pattern + '"'); + } + if (options.nonull === true || options.nullglob === true) { + return [options.unescape ? utils.unescape(pattern) : pattern]; + } + } + + // if `opts.ignore` was defined, diff ignored list + if (options.ignore) { + matches = nanomatch.not(matches, options.ignore, options); + } + + return options.nodupes !== false ? utils.unique(matches) : matches; +}; + +/** + * Returns true if the specified `string` matches the given glob `pattern`. + * + * ```js + * var nm = require('nanomatch'); + * nm.isMatch(string, pattern[, options]); + * + * console.log(nm.isMatch('a.a', '*.a')); + * //=> true + * console.log(nm.isMatch('a.b', '*.a')); + * //=> false + * ``` + * @param {String} `string` String to match + * @param {String} `pattern` Glob pattern to use for matching. 
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if the string matches the glob pattern. + * @api public + */ + +nanomatch.isMatch = function(str, pattern, options) { + if (typeof str !== 'string') { + throw new TypeError('expected a string: "' + util.inspect(str) + '"'); + } + + if (utils.isEmptyString(str) || utils.isEmptyString(pattern)) { + return false; + } + + var equals = utils.equalsPattern(options); + if (equals(str)) { + return true; + } + + var isMatch = memoize('isMatch', pattern, options, nanomatch.matcher); + return isMatch(str); +}; + +/** + * Returns true if some of the elements in the given `list` match any of the + * given glob `patterns`. + * + * ```js + * var nm = require('nanomatch'); + * nm.some(list, patterns[, options]); + * + * console.log(nm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // true + * console.log(nm.some(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +nanomatch.some = function(list, patterns, options) { + if (typeof list === 'string') { + list = [list]; + } + + for (var i = 0; i < list.length; i++) { + if (nanomatch(list[i], patterns, options).length === 1) { + return true; + } + } + + return false; +}; + +/** + * Returns true if every element in the given `list` matches + * at least one of the given glob `patterns`. + * + * ```js + * var nm = require('nanomatch'); + * nm.every(list, patterns[, options]); + * + * console.log(nm.every('foo.js', ['foo.js'])); + * // true + * console.log(nm.every(['foo.js', 'bar.js'], ['*.js'])); + * // true + * console.log(nm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // false + * console.log(nm.every(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +nanomatch.every = function(list, patterns, options) { + if (typeof list === 'string') { + list = [list]; + } + + for (var i = 0; i < list.length; i++) { + if (nanomatch(list[i], patterns, options).length !== 1) { + return false; + } + } + + return true; +}; + +/** + * Returns true if **any** of the given glob `patterns` + * match the specified `string`. + * + * ```js + * var nm = require('nanomatch'); + * nm.any(string, patterns[, options]); + * + * console.log(nm.any('a.a', ['b.*', '*.a'])); + * //=> true + * console.log(nm.any('a.a', 'b.*')); + * //=> false + * ``` + * @param {String|Array} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +nanomatch.any = function(str, patterns, options) { + if (typeof str !== 'string') { + throw new TypeError('expected a string: "' + util.inspect(str) + '"'); + } + + if (utils.isEmptyString(str) || utils.isEmptyString(patterns)) { + return false; + } + + if (typeof patterns === 'string') { + patterns = [patterns]; + } + + for (var i = 0; i < patterns.length; i++) { + if (nanomatch.isMatch(str, patterns[i], options)) { + return true; + } + } + return false; +}; + +/** + * Returns true if **all** of the given `patterns` + * match the specified string. + * + * ```js + * var nm = require('nanomatch'); + * nm.all(string, patterns[, options]); + * + * console.log(nm.all('foo.js', ['foo.js'])); + * // true + * + * console.log(nm.all('foo.js', ['*.js', '!foo.js'])); + * // false + * + * console.log(nm.all('foo.js', ['*.js', 'foo.js'])); + * // true + * + * console.log(nm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); + * // true + * ``` + * @param {String|Array} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +nanomatch.all = function(str, patterns, options) { + if (typeof str !== 'string') { + throw new TypeError('expected a string: "' + util.inspect(str) + '"'); + } + + if (typeof patterns === 'string') { + patterns = [patterns]; + } + + for (var i = 0; i < patterns.length; i++) { + if (!nanomatch.isMatch(str, patterns[i], options)) { + return false; + } + } + return true; +}; + +/** + * Returns a list of strings that _**do not match any**_ of the given `patterns`. + * + * ```js + * var nm = require('nanomatch'); + * nm.not(list, patterns[, options]); + * + * console.log(nm.not(['a.a', 'b.b', 'c.c'], '*.a')); + * //=> ['b.b', 'c.c'] + * ``` + * @param {Array} `list` Array of strings to match. + * @param {String|Array} `patterns` One or more glob pattern to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array} Returns an array of strings that **do not match** the given patterns. + * @api public + */ + +nanomatch.not = function(list, patterns, options) { + var opts = extend({}, options); + var ignore = opts.ignore; + delete opts.ignore; + + list = utils.arrayify(list); + + var matches = utils.diff(list, nanomatch(list, patterns, opts)); + if (ignore) { + matches = utils.diff(matches, nanomatch(list, ignore)); + } + + return opts.nodupes !== false ? utils.unique(matches) : matches; +}; + +/** + * Returns true if the given `string` contains the given pattern. Similar + * to [.isMatch](#isMatch) but the pattern can match any part of the string. + * + * ```js + * var nm = require('nanomatch'); + * nm.contains(string, pattern[, options]); + * + * console.log(nm.contains('aa/bb/cc', '*b')); + * //=> true + * console.log(nm.contains('aa/bb/cc', '*d')); + * //=> false + * ``` + * @param {String} `str` The string to match. + * @param {String|Array} `patterns` Glob pattern to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if the patter matches any part of `str`. 
+ * @api public + */ + +nanomatch.contains = function(str, patterns, options) { + if (typeof str !== 'string') { + throw new TypeError('expected a string: "' + util.inspect(str) + '"'); + } + + if (typeof patterns === 'string') { + if (utils.isEmptyString(str) || utils.isEmptyString(patterns)) { + return false; + } + + var equals = utils.equalsPattern(patterns, options); + if (equals(str)) { + return true; + } + var contains = utils.containsPattern(patterns, options); + if (contains(str)) { + return true; + } + } + + var opts = extend({}, options, {contains: true}); + return nanomatch.any(str, patterns, opts); +}; + +/** + * Returns true if the given pattern and options should enable + * the `matchBase` option. + * @return {Boolean} + * @api private + */ + +nanomatch.matchBase = function(pattern, options) { + if (pattern && pattern.indexOf('/') !== -1 || !options) return false; + return options.basename === true || options.matchBase === true; +}; + +/** + * Filter the keys of the given object with the given `glob` pattern + * and `options`. Does not attempt to match nested keys. If you need this feature, + * use [glob-object][] instead. + * + * ```js + * var nm = require('nanomatch'); + * nm.matchKeys(object, patterns[, options]); + * + * var obj = { aa: 'a', ab: 'b', ac: 'c' }; + * console.log(nm.matchKeys(obj, '*b')); + * //=> { ab: 'b' } + * ``` + * @param {Object} `object` The object with keys to filter. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Object} Returns an object with only keys that match the given patterns. + * @api public + */ + +nanomatch.matchKeys = function(obj, patterns, options) { + if (!utils.isObject(obj)) { + throw new TypeError('expected the first argument to be an object'); + } + var keys = nanomatch(Object.keys(obj), patterns, options); + return utils.pick(obj, keys); +}; + +/** + * Returns a memoized matcher function from the given glob `pattern` and `options`. + * The returned function takes a string to match as its only argument and returns + * true if the string is a match. + * + * ```js + * var nm = require('nanomatch'); + * nm.matcher(pattern[, options]); + * + * var isMatch = nm.matcher('*.!(*a)'); + * console.log(isMatch('a.a')); + * //=> false + * console.log(isMatch('a.b')); + * //=> true + * ``` + * @param {String} `pattern` Glob pattern + * @param {Object} `options` See available [options](#options) for changing how matches are performed. + * @return {Function} Returns a matcher function. 
+ * @api public + */ + +nanomatch.matcher = function matcher(pattern, options) { + if (utils.isEmptyString(pattern)) { + return function() { + return false; + }; + } + + if (Array.isArray(pattern)) { + return compose(pattern, options, matcher); + } + + // if pattern is a regex + if (pattern instanceof RegExp) { + return test(pattern); + } + + // if pattern is invalid + if (!utils.isString(pattern)) { + throw new TypeError('expected pattern to be an array, string or regex'); + } + + // if pattern is a non-glob string + if (!utils.hasSpecialChars(pattern)) { + if (options && options.nocase === true) { + pattern = pattern.toLowerCase(); + } + return utils.matchPath(pattern, options); + } + + // if pattern is a glob string + var re = nanomatch.makeRe(pattern, options); + + // if `options.matchBase` or `options.basename` is defined + if (nanomatch.matchBase(pattern, options)) { + return utils.matchBasename(re, options); + } + + function test(regex) { + var equals = utils.equalsPattern(options); + var unixify = utils.unixify(options); + + return function(str) { + if (equals(str)) { + return true; + } + + if (regex.test(unixify(str))) { + return true; + } + return false; + }; + } + + // create matcher function + var matcherFn = test(re); + // set result object from compiler on matcher function, + // as a non-enumerable property. useful for debugging + utils.define(matcherFn, 'result', re.result); + return matcherFn; +}; + +/** + * Returns an array of matches captured by `pattern` in `string, or + * `null` if the pattern did not match. + * + * ```js + * var nm = require('nanomatch'); + * nm.capture(pattern, string[, options]); + * + * console.log(nm.capture('test/*.js', 'test/foo.js')); + * //=> ['foo'] + * console.log(nm.capture('test/*.js', 'foo/bar.css')); + * //=> null + * ``` + * @param {String} `pattern` Glob pattern to use for matching. + * @param {String} `string` String to match + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns an array of captures if the string matches the glob pattern, otherwise `null`. + * @api public + */ + +nanomatch.capture = function(pattern, str, options) { + var re = nanomatch.makeRe(pattern, extend({capture: true}, options)); + var unixify = utils.unixify(options); + + function match() { + return function(string) { + var match = re.exec(unixify(string)); + if (!match) { + return null; + } + + return match.slice(1); + }; + } + + var capture = memoize('capture', pattern, options, match); + return capture(str); +}; + +/** + * Create a regular expression from the given glob `pattern`. + * + * ```js + * var nm = require('nanomatch'); + * nm.makeRe(pattern[, options]); + * + * console.log(nm.makeRe('*.js')); + * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ + * ``` + * @param {String} `pattern` A glob pattern to convert to regex. + * @param {Object} `options` See available [options](#options) for changing how matches are performed. + * @return {RegExp} Returns a regex created from the given pattern. 
+ * @api public + */ + +nanomatch.makeRe = function(pattern, options) { + if (pattern instanceof RegExp) { + return pattern; + } + + if (typeof pattern !== 'string') { + throw new TypeError('expected pattern to be a string'); + } + + if (pattern.length > MAX_LENGTH) { + throw new Error('expected pattern to be less than ' + MAX_LENGTH + ' characters'); + } + + function makeRe() { + var opts = utils.extend({wrap: false}, options); + var result = nanomatch.create(pattern, opts); + var regex = toRegex(result.output, opts); + utils.define(regex, 'result', result); + return regex; + } + + return memoize('makeRe', pattern, options, makeRe); +}; + +/** + * Parses the given glob `pattern` and returns an object with the compiled `output` + * and optional source `map`. + * + * ```js + * var nm = require('nanomatch'); + * nm.create(pattern[, options]); + * + * console.log(nm.create('abc/*.js')); + * // { options: { source: 'string', sourcemap: true }, + * // state: {}, + * // compilers: + * // { ... }, + * // output: '(\\.[\\\\\\/])?abc\\/(?!\\.)(?=.)[^\\/]*?\\.js', + * // ast: + * // { type: 'root', + * // errors: [], + * // nodes: + * // [ ... ], + * // dot: false, + * // input: 'abc/*.js' }, + * // parsingErrors: [], + * // map: + * // { version: 3, + * // sources: [ 'string' ], + * // names: [], + * // mappings: 'AAAA,GAAG,EAAC,kBAAC,EAAC,EAAE', + * // sourcesContent: [ 'abc/*.js' ] }, + * // position: { line: 1, column: 28 }, + * // content: {}, + * // files: {}, + * // idx: 6 } + * ``` + * @param {String} `pattern` Glob pattern to parse and compile. + * @param {Object} `options` Any [options](#options) to change how parsing and compiling is performed. + * @return {Object} Returns an object with the parsed AST, compiled string and optional source map. + * @api public + */ + +nanomatch.create = function(pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('expected a string'); + } + function create() { + return nanomatch.compile(nanomatch.parse(pattern, options), options); + } + return memoize('create', pattern, options, create); +}; + +/** + * Parse the given `str` with the given `options`. + * + * ```js + * var nm = require('nanomatch'); + * nm.parse(pattern[, options]); + * + * var ast = nm.parse('a/{b,c}/d'); + * console.log(ast); + * // { type: 'root', + * // errors: [], + * // input: 'a/{b,c}/d', + * // nodes: + * // [ { type: 'bos', val: '' }, + * // { type: 'text', val: 'a/' }, + * // { type: 'brace', + * // nodes: + * // [ { type: 'brace.open', val: '{' }, + * // { type: 'text', val: 'b,c' }, + * // { type: 'brace.close', val: '}' } ] }, + * // { type: 'text', val: '/d' }, + * // { type: 'eos', val: '' } ] } + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {Object} Returns an AST + * @api public + */ + +nanomatch.parse = function(pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('expected a string'); + } + + function parse() { + var snapdragon = utils.instantiate(null, options); + parsers(snapdragon, options); + + var ast = snapdragon.parse(pattern, options); + utils.define(ast, 'snapdragon', snapdragon); + ast.input = pattern; + return ast; + } + + return memoize('parse', pattern, options, parse); +}; + +/** + * Compile the given `ast` or string with the given `options`. 
+ * + * ```js + * var nm = require('nanomatch'); + * nm.compile(ast[, options]); + * + * var ast = nm.parse('a/{b,c}/d'); + * console.log(nm.compile(ast)); + * // { options: { source: 'string' }, + * // state: {}, + * // compilers: + * // { eos: [Function], + * // noop: [Function], + * // bos: [Function], + * // brace: [Function], + * // 'brace.open': [Function], + * // text: [Function], + * // 'brace.close': [Function] }, + * // output: [ 'a/(b|c)/d' ], + * // ast: + * // { ... }, + * // parsingErrors: [] } + * ``` + * @param {Object|String} `ast` + * @param {Object} `options` + * @return {Object} Returns an object that has an `output` property with the compiled string. + * @api public + */ + +nanomatch.compile = function(ast, options) { + if (typeof ast === 'string') { + ast = nanomatch.parse(ast, options); + } + + function compile() { + var snapdragon = utils.instantiate(ast, options); + compilers(snapdragon, options); + return snapdragon.compile(ast, options); + } + + return memoize('compile', ast.input, options, compile); +}; + +/** + * Clear the regex cache. + * + * ```js + * nm.clearCache(); + * ``` + * @api public + */ + +nanomatch.clearCache = function() { + nanomatch.cache.__data__ = {}; +}; + +/** + * Compose a matcher function with the given patterns. + * This allows matcher functions to be compiled once and + * called multiple times. + */ + +function compose(patterns, options, matcher) { + var matchers; + + return memoize('compose', String(patterns), options, function() { + return function(file) { + // delay composition until it's invoked the first time, + // after that it won't be called again + if (!matchers) { + matchers = []; + for (var i = 0; i < patterns.length; i++) { + matchers.push(matcher(patterns[i], options)); + } + } + + var len = matchers.length; + while (len--) { + if (matchers[len](file) === true) { + return true; + } + } + return false; + }; + }); +} + +/** + * Memoize a generated regex or function. A unique key is generated + * from the `type` (usually method name), the `pattern`, and + * user-defined options. 
+ */ + +function memoize(type, pattern, options, fn) { + var key = utils.createKey(type + '=' + pattern, options); + + if (options && options.cache === false) { + return fn(pattern, options); + } + + if (cache.has(type, key)) { + return cache.get(type, key); + } + + var val = fn(pattern, options); + cache.set(type, key, val); + return val; +} + +/** + * Expose compiler, parser and cache on `nanomatch` + */ + +nanomatch.compilers = compilers; +nanomatch.parsers = parsers; +nanomatch.cache = cache; + +/** + * Expose `nanomatch` + * @type {Function} + */ + +module.exports = nanomatch; diff --git a/node_modules/nanomatch/lib/cache.js b/node_modules/nanomatch/lib/cache.js new file mode 100644 index 00000000..fffc4c17 --- /dev/null +++ b/node_modules/nanomatch/lib/cache.js @@ -0,0 +1 @@ +module.exports = new (require('fragment-cache'))(); diff --git a/node_modules/nanomatch/lib/compilers.js b/node_modules/nanomatch/lib/compilers.js new file mode 100644 index 00000000..d7a786e7 --- /dev/null +++ b/node_modules/nanomatch/lib/compilers.js @@ -0,0 +1,339 @@ +'use strict'; + +/** +* Nanomatch compilers +*/ + +module.exports = function(nanomatch, options) { + function slash() { + if (options && typeof options.slash === 'string') { + return options.slash; + } + if (options && typeof options.slash === 'function') { + return options.slash.call(nanomatch); + } + return '\\\\/'; + } + + function star() { + if (options && typeof options.star === 'string') { + return options.star; + } + if (options && typeof options.star === 'function') { + return options.star.call(nanomatch); + } + return '[^' + slash() + ']*?'; + } + + var ast = nanomatch.ast = nanomatch.parser.ast; + ast.state = nanomatch.parser.state; + nanomatch.compiler.state = ast.state; + nanomatch.compiler + + /** + * Negation / escaping + */ + + .set('not', function(node) { + var prev = this.prev(); + if (this.options.nonegate === true || prev.type !== 'bos') { + return this.emit('\\' + node.val, node); + } + return this.emit(node.val, node); + }) + .set('escape', function(node) { + if (this.options.unescape && /^[-\w_.]/.test(node.val)) { + return this.emit(node.val, node); + } + return this.emit('\\' + node.val, node); + }) + .set('quoted', function(node) { + return this.emit(node.val, node); + }) + + /** + * Regex + */ + + .set('dollar', function(node) { + if (node.parent.type === 'bracket') { + return this.emit(node.val, node); + } + return this.emit('\\' + node.val, node); + }) + + /** + * Dot: "." 
+ */ + + .set('dot', function(node) { + if (node.dotfiles === true) this.dotfiles = true; + return this.emit('\\' + node.val, node); + }) + + /** + * Slashes: "/" and "\" + */ + + .set('backslash', function(node) { + return this.emit(node.val, node); + }) + .set('slash', function(node, nodes, i) { + var val = '[' + slash() + ']'; + var parent = node.parent; + var prev = this.prev(); + + // set "node.hasSlash" to true on all ancestor parens nodes + while (parent.type === 'paren' && !parent.hasSlash) { + parent.hasSlash = true; + parent = parent.parent; + } + + if (prev.addQmark) { + val += '?'; + } + + // word boundary + if (node.rest.slice(0, 2) === '\\b') { + return this.emit(val, node); + } + + // globstars + if (node.parsed === '**' || node.parsed === './**') { + this.output = '(?:' + this.output; + return this.emit(val + ')?', node); + } + + // negation + if (node.parsed === '!**' && this.options.nonegate !== true) { + return this.emit(val + '?\\b', node); + } + return this.emit(val, node); + }) + + /** + * Square brackets + */ + + .set('bracket', function(node) { + var close = node.close; + var open = !node.escaped ? '[' : '\\['; + var negated = node.negated; + var inner = node.inner; + var val = node.val; + + if (node.escaped === true) { + inner = inner.replace(/\\?(\W)/g, '\\$1'); + negated = ''; + } + + if (inner === ']-') { + inner = '\\]\\-'; + } + + if (negated && inner.indexOf('.') === -1) { + inner += '.'; + } + if (negated && inner.indexOf('/') === -1) { + inner += '/'; + } + + val = open + negated + inner + close; + return this.emit(val, node); + }) + + /** + * Square: "[.]" (only matches a single character in brackets) + */ + + .set('square', function(node) { + var val = (/^\W/.test(node.val) ? '\\' : '') + node.val; + return this.emit(val, node); + }) + + /** + * Question mark: "?" + */ + + .set('qmark', function(node) { + var prev = this.prev(); + // don't use "slash" variable so that we always avoid + // matching backslashes and slashes with a qmark + var val = '[^.\\\\/]'; + if (this.options.dot || (prev.type !== 'bos' && prev.type !== 'slash')) { + val = '[^\\\\/]'; + } + + if (node.parsed.slice(-1) === '(') { + var ch = node.rest.charAt(0); + if (ch === '!' 
|| ch === '=' || ch === ':') { + return this.emit(node.val, node); + } + } + + if (node.val.length > 1) { + val += '{' + node.val.length + '}'; + } + return this.emit(val, node); + }) + + /** + * Plus + */ + + .set('plus', function(node) { + var prev = node.parsed.slice(-1); + if (prev === ']' || prev === ')') { + return this.emit(node.val, node); + } + if (!this.output || (/[?*+]/.test(ch) && node.parent.type !== 'bracket')) { + return this.emit('\\+', node); + } + var ch = this.output.slice(-1); + if (/\w/.test(ch) && !node.inside) { + return this.emit('+\\+?', node); + } + return this.emit('+', node); + }) + + /** + * globstar: '**' + */ + + .set('globstar', function(node, nodes, i) { + if (!this.output) { + this.state.leadingGlobstar = true; + } + + var prev = this.prev(); + var before = this.prev(2); + var next = this.next(); + var after = this.next(2); + var type = prev.type; + var val = node.val; + + if (prev.type === 'slash' && next.type === 'slash') { + if (before.type === 'text') { + this.output += '?'; + + if (after.type !== 'text') { + this.output += '\\b'; + } + } + } + + var parsed = node.parsed; + if (parsed.charAt(0) === '!') { + parsed = parsed.slice(1); + } + + var isInside = node.isInside.paren || node.isInside.brace; + if (parsed && type !== 'slash' && type !== 'bos' && !isInside) { + val = star(); + } else { + val = this.options.dot !== true + ? '(?:(?!(?:[' + slash() + ']|^)\\.).)*?' + : '(?:(?!(?:[' + slash() + ']|^)(?:\\.{1,2})($|[' + slash() + ']))(?!\\.{2}).)*?'; + } + + if ((type === 'slash' || type === 'bos') && this.options.dot !== true) { + val = '(?!\\.)' + val; + } + + if (prev.type === 'slash' && next.type === 'slash' && before.type !== 'text') { + if (after.type === 'text' || after.type === 'star') { + node.addQmark = true; + } + } + + if (this.options.capture) { + val = '(' + val + ')'; + } + + return this.emit(val, node); + }) + + /** + * Star: "*" + */ + + .set('star', function(node, nodes, i) { + var prior = nodes[i - 2] || {}; + var prev = this.prev(); + var next = this.next(); + var type = prev.type; + + function isStart(n) { + return n.type === 'bos' || n.type === 'slash'; + } + + if (this.output === '' && this.options.contains !== true) { + this.output = '(?![' + slash() + '])'; + } + + if (type === 'bracket' && this.options.bash === false) { + var str = next && next.type === 'bracket' ? star() : '*?'; + if (!prev.nodes || prev.nodes[1].type !== 'posix') { + return this.emit(str, node); + } + } + + var prefix = !this.dotfiles && type !== 'text' && type !== 'escape' + ? (this.options.dot ? '(?!(?:^|[' + slash() + '])\\.{1,2}(?:$|[' + slash() + ']))' : '(?!\\.)') + : ''; + + if (isStart(prev) || (isStart(prior) && type === 'not')) { + if (prefix !== '(?!\\.)') { + prefix += '(?!(\\.{2}|\\.[' + slash() + ']))(?=.)'; + } else { + prefix += '(?=.)'; + } + } else if (prefix === '(?!\\.)') { + prefix = ''; + } + + if (prev.type === 'not' && prior.type === 'bos' && this.options.dot === true) { + this.output = '(?!\\.)' + this.output; + } + + var output = prefix + star(); + if (this.options.capture) { + output = '(' + output + ')'; + } + + return this.emit(output, node); + }) + + /** + * Text + */ + + .set('text', function(node) { + return this.emit(node.val, node); + }) + + /** + * End-of-string + */ + + .set('eos', function(node) { + var prev = this.prev(); + var val = node.val; + + this.output = '(?:\\.[' + slash() + '](?=.))?' + this.output; + if (this.state.metachar && prev.type !== 'qmark' && prev.type !== 'slash') { + val += (this.options.contains ? 
'[' + slash() + ']?' : '(?:[' + slash() + ']|$)'); + } + + return this.emit(val, node); + }); + + /** + * Allow custom compilers to be passed on options + */ + + if (options && typeof options.compilers === 'function') { + options.compilers(nanomatch.compiler); + } +}; + diff --git a/node_modules/nanomatch/lib/parsers.js b/node_modules/nanomatch/lib/parsers.js new file mode 100644 index 00000000..f87df8f3 --- /dev/null +++ b/node_modules/nanomatch/lib/parsers.js @@ -0,0 +1,386 @@ +'use strict'; + +var regexNot = require('regex-not'); +var toRegex = require('to-regex'); + +/** + * Characters to use in negation regex (we want to "not" match + * characters that are matched by other parsers) + */ + +var cached; +var NOT_REGEX = '[\\[!*+?$^"\'.\\\\/]+'; +var not = createTextRegex(NOT_REGEX); + +/** + * Nanomatch parsers + */ + +module.exports = function(nanomatch, options) { + var parser = nanomatch.parser; + var opts = parser.options; + + parser.state = { + slashes: 0, + paths: [] + }; + + parser.ast.state = parser.state; + parser + + /** + * Beginning-of-string + */ + + .capture('prefix', function() { + if (this.parsed) return; + var m = this.match(/^\.[\\/]/); + if (!m) return; + this.state.strictOpen = !!this.options.strictOpen; + this.state.addPrefix = true; + }) + + /** + * Escape: "\\." + */ + + .capture('escape', function() { + if (this.isInside('bracket')) return; + var pos = this.position(); + var m = this.match(/^(?:\\(.)|([$^]))/); + if (!m) return; + + return pos({ + type: 'escape', + val: m[2] || m[1] + }); + }) + + /** + * Quoted strings + */ + + .capture('quoted', function() { + var pos = this.position(); + var m = this.match(/^["']/); + if (!m) return; + + var quote = m[0]; + if (this.input.indexOf(quote) === -1) { + return pos({ + type: 'escape', + val: quote + }); + } + + var tok = advanceTo(this.input, quote); + this.consume(tok.len); + + return pos({ + type: 'quoted', + val: tok.esc + }); + }) + + /** + * Negations: "!" + */ + + .capture('not', function() { + var parsed = this.parsed; + var pos = this.position(); + var m = this.match(this.notRegex || /^!+/); + if (!m) return; + var val = m[0]; + + var isNegated = (val.length % 2) === 1; + if (parsed === '' && !isNegated) { + val = ''; + } + + // if nothing has been parsed, we know `!` is at the start, + // so we need to wrap the result in a negation regex + if (parsed === '' && isNegated && this.options.nonegate !== true) { + this.bos.val = '(?!^(?:'; + this.append = ')$).*'; + val = ''; + } + return pos({ + type: 'not', + val: val + }); + }) + + /** + * Dot: "." + */ + + .capture('dot', function() { + var parsed = this.parsed; + var pos = this.position(); + var m = this.match(/^\.+/); + if (!m) return; + + var val = m[0]; + this.state.dot = val === '.' && (parsed === '' || parsed.slice(-1) === '/'); + + return pos({ + type: 'dot', + dotfiles: this.state.dot, + val: val + }); + }) + + /** + * Plus: "+" + */ + + .capture('plus', /^\+(?!\()/) + + /** + * Question mark: "?" + */ + + .capture('qmark', function() { + var parsed = this.parsed; + var pos = this.position(); + var m = this.match(/^\?+(?!\()/); + if (!m) return; + + this.state.metachar = true; + this.state.qmark = true; + + return pos({ + type: 'qmark', + parsed: parsed, + val: m[0] + }); + }) + + /** + * Globstar: "**" + */ + + .capture('globstar', function() { + var parsed = this.parsed; + var pos = this.position(); + var m = this.match(/^\*{2}(?![*(])(?=[,)/]|$)/); + if (!m) return; + + var type = opts.noglobstar !== true ? 
'globstar' : 'star'; + var node = pos({type: type, parsed: parsed}); + this.state.metachar = true; + + while (this.input.slice(0, 4) === '/**/') { + this.input = this.input.slice(3); + } + + node.isInside = { + brace: this.isInside('brace'), + paren: this.isInside('paren') + }; + + if (type === 'globstar') { + this.state.globstar = true; + node.val = '**'; + + } else { + this.state.star = true; + node.val = '*'; + } + + return node; + }) + + /** + * Star: "*" + */ + + .capture('star', function() { + var pos = this.position(); + var starRe = /^(?:\*(?![*(])|[*]{3,}(?!\()|[*]{2}(?![(/]|$)|\*(?=\*\())/; + var m = this.match(starRe); + if (!m) return; + + this.state.metachar = true; + this.state.star = true; + return pos({ + type: 'star', + val: m[0] + }); + }) + + /** + * Slash: "/" + */ + + .capture('slash', function() { + var pos = this.position(); + var m = this.match(/^\//); + if (!m) return; + + this.state.slashes++; + return pos({ + type: 'slash', + val: m[0] + }); + }) + + /** + * Backslash: "\\" + */ + + .capture('backslash', function() { + var pos = this.position(); + var m = this.match(/^\\(?![*+?(){}[\]'"])/); + if (!m) return; + + var val = m[0]; + + if (this.isInside('bracket')) { + val = '\\'; + } else if (val.length > 1) { + val = '\\\\'; + } + + return pos({ + type: 'backslash', + val: val + }); + }) + + /** + * Square: "[.]" + */ + + .capture('square', function() { + if (this.isInside('bracket')) return; + var pos = this.position(); + var m = this.match(/^\[([^!^\\])\]/); + if (!m) return; + + return pos({ + type: 'square', + val: m[1] + }); + }) + + /** + * Brackets: "[...]" (basic, this can be overridden by other parsers) + */ + + .capture('bracket', function() { + var pos = this.position(); + var m = this.match(/^(?:\[([!^]?)([^\]]+|\]-)(\]|[^*+?]+)|\[)/); + if (!m) return; + + var val = m[0]; + var negated = m[1] ? 
'^' : ''; + var inner = (m[2] || '').replace(/\\\\+/, '\\\\'); + var close = m[3] || ''; + + if (m[2] && inner.length < m[2].length) { + val = val.replace(/\\\\+/, '\\\\'); + } + + var esc = this.input.slice(0, 2); + if (inner === '' && esc === '\\]') { + inner += esc; + this.consume(2); + + var str = this.input; + var idx = -1; + var ch; + + while ((ch = str[++idx])) { + this.consume(1); + if (ch === ']') { + close = ch; + break; + } + inner += ch; + } + } + + return pos({ + type: 'bracket', + val: val, + escaped: close !== ']', + negated: negated, + inner: inner, + close: close + }); + }) + + /** + * Text + */ + + .capture('text', function() { + if (this.isInside('bracket')) return; + var pos = this.position(); + var m = this.match(not); + if (!m || !m[0]) return; + + return pos({ + type: 'text', + val: m[0] + }); + }); + + /** + * Allow custom parsers to be passed on options + */ + + if (options && typeof options.parsers === 'function') { + options.parsers(nanomatch.parser); + } +}; + +/** + * Advance to the next non-escaped character + */ + +function advanceTo(input, endChar) { + var ch = input.charAt(0); + var tok = { len: 1, val: '', esc: '' }; + var idx = 0; + + function advance() { + if (ch !== '\\') { + tok.esc += '\\' + ch; + tok.val += ch; + } + + ch = input.charAt(++idx); + tok.len++; + + if (ch === '\\') { + advance(); + advance(); + } + } + + while (ch && ch !== endChar) { + advance(); + } + return tok; +} + +/** + * Create text regex + */ + +function createTextRegex(pattern) { + if (cached) return cached; + var opts = {contains: true, strictClose: false}; + var not = regexNot.create(pattern, opts); + var re = toRegex('^(?:[*]\\((?=.)|' + not + ')', opts); + return (cached = re); +} + +/** + * Expose negation string + */ + +module.exports.not = NOT_REGEX; diff --git a/node_modules/nanomatch/lib/utils.js b/node_modules/nanomatch/lib/utils.js new file mode 100644 index 00000000..0cf1501d --- /dev/null +++ b/node_modules/nanomatch/lib/utils.js @@ -0,0 +1,379 @@ +'use strict'; + +var utils = module.exports; +var path = require('path'); + +/** + * Module dependencies + */ + +var isWindows = require('is-windows')(); +var Snapdragon = require('snapdragon'); +utils.define = require('define-property'); +utils.diff = require('arr-diff'); +utils.extend = require('extend-shallow'); +utils.pick = require('object.pick'); +utils.typeOf = require('kind-of'); +utils.unique = require('array-unique'); + +/** + * Returns true if the given value is effectively an empty string + */ + +utils.isEmptyString = function(val) { + return String(val) === '' || String(val) === './'; +}; + +/** + * Returns true if the platform is windows, or `path.sep` is `\\`. + * This is defined as a function to allow `path.sep` to be set in unit tests, + * or by the user, if there is a reason to do so. 
+ * @return {Boolean} + */ + +utils.isWindows = function() { + return path.sep === '\\' || isWindows === true; +}; + +/** + * Return the last element from an array + */ + +utils.last = function(arr, n) { + return arr[arr.length - (n || 1)]; +}; + +/** + * Get the `Snapdragon` instance to use + */ + +utils.instantiate = function(ast, options) { + var snapdragon; + // if an instance was created by `.parse`, use that instance + if (utils.typeOf(ast) === 'object' && ast.snapdragon) { + snapdragon = ast.snapdragon; + // if the user supplies an instance on options, use that instance + } else if (utils.typeOf(options) === 'object' && options.snapdragon) { + snapdragon = options.snapdragon; + // create a new instance + } else { + snapdragon = new Snapdragon(options); + } + + utils.define(snapdragon, 'parse', function(str, options) { + var parsed = Snapdragon.prototype.parse.call(this, str, options); + parsed.input = str; + + // escape unmatched brace/bracket/parens + var last = this.parser.stack.pop(); + if (last && this.options.strictErrors !== true) { + var open = last.nodes[0]; + var inner = last.nodes[1]; + if (last.type === 'bracket') { + if (inner.val.charAt(0) === '[') { + inner.val = '\\' + inner.val; + } + + } else { + open.val = '\\' + open.val; + var sibling = open.parent.nodes[1]; + if (sibling.type === 'star') { + sibling.loose = true; + } + } + } + + // add non-enumerable parser reference + utils.define(parsed, 'parser', this.parser); + return parsed; + }); + + return snapdragon; +}; + +/** + * Create the key to use for memoization. The key is generated + * by iterating over the options and concatenating key-value pairs + * to the pattern string. + */ + +utils.createKey = function(pattern, options) { + if (typeof options === 'undefined') { + return pattern; + } + var key = pattern; + for (var prop in options) { + if (options.hasOwnProperty(prop)) { + key += ';' + prop + '=' + String(options[prop]); + } + } + return key; +}; + +/** + * Cast `val` to an array + * @return {Array} + */ + +utils.arrayify = function(val) { + if (typeof val === 'string') return [val]; + return val ? (Array.isArray(val) ? val : [val]) : []; +}; + +/** + * Return true if `val` is a non-empty string + */ + +utils.isString = function(val) { + return typeof val === 'string'; +}; + +/** + * Return true if `val` is a non-empty string + */ + +utils.isRegex = function(val) { + return utils.typeOf(val) === 'regexp'; +}; + +/** + * Return true if `val` is a non-empty string + */ + +utils.isObject = function(val) { + return utils.typeOf(val) === 'object'; +}; + +/** + * Escape regex characters in the given string + */ + +utils.escapeRegex = function(str) { + return str.replace(/[-[\]{}()^$|*+?.\\/\s]/g, '\\$&'); +}; + +/** + * Combines duplicate characters in the provided `input` string. + * @param {String} `input` + * @returns {String} + */ + +utils.combineDupes = function(input, patterns) { + patterns = utils.arrayify(patterns).join('|').split('|'); + patterns = patterns.map(function(s) { + return s.replace(/\\?([+*\\/])/g, '\\$1'); + }); + var substr = patterns.join('|'); + var regex = new RegExp('(' + substr + ')(?=\\1)', 'g'); + return input.replace(regex, ''); +}; + +/** + * Returns true if the given `str` has special characters + */ + +utils.hasSpecialChars = function(str) { + return /(?:(?:(^|\/)[!.])|[*?+()|[\]{}]|[+@]\()/.test(str); +}; + +/** + * Normalize slashes in the given filepath. 
+ * + * @param {String} `filepath` + * @return {String} + */ + +utils.toPosixPath = function(str) { + return str.replace(/\\+/g, '/'); +}; + +/** + * Strip backslashes before special characters in a string. + * + * @param {String} `str` + * @return {String} + */ + +utils.unescape = function(str) { + return utils.toPosixPath(str.replace(/\\(?=[*+?!.])/g, '')); +}; + +/** + * Strip the drive letter from a windows filepath + * @param {String} `fp` + * @return {String} + */ + +utils.stripDrive = function(fp) { + return utils.isWindows() ? fp.replace(/^[a-z]:[\\/]+?/i, '/') : fp; +}; + +/** + * Strip the prefix from a filepath + * @param {String} `fp` + * @return {String} + */ + +utils.stripPrefix = function(str) { + if (str.charAt(0) === '.' && (str.charAt(1) === '/' || str.charAt(1) === '\\')) { + return str.slice(2); + } + return str; +}; + +/** + * Returns true if `str` is a common character that doesn't need + * to be processed to be used for matching. + * @param {String} `str` + * @return {Boolean} + */ + +utils.isSimpleChar = function(str) { + return str.trim() === '' || str === '.'; +}; + +/** + * Returns true if the given str is an escaped or + * unescaped path character + */ + +utils.isSlash = function(str) { + return str === '/' || str === '\\/' || str === '\\' || str === '\\\\'; +}; + +/** + * Returns a function that returns true if the given + * pattern matches or contains a `filepath` + * + * @param {String} `pattern` + * @return {Function} + */ + +utils.matchPath = function(pattern, options) { + return (options && options.contains) + ? utils.containsPattern(pattern, options) + : utils.equalsPattern(pattern, options); +}; + +/** + * Returns true if the given (original) filepath or unixified path are equal + * to the given pattern. + */ + +utils._equals = function(filepath, unixPath, pattern) { + return pattern === filepath || pattern === unixPath; +}; + +/** + * Returns true if the given (original) filepath or unixified path contain + * the given pattern. + */ + +utils._contains = function(filepath, unixPath, pattern) { + return filepath.indexOf(pattern) !== -1 || unixPath.indexOf(pattern) !== -1; +}; + +/** + * Returns a function that returns true if the given + * pattern is the same as a given `filepath` + * + * @param {String} `pattern` + * @return {Function} + */ + +utils.equalsPattern = function(pattern, options) { + var unixify = utils.unixify(options); + options = options || {}; + + return function fn(filepath) { + var equal = utils._equals(filepath, unixify(filepath), pattern); + if (equal === true || options.nocase !== true) { + return equal; + } + var lower = filepath.toLowerCase(); + return utils._equals(lower, unixify(lower), pattern); + }; +}; + +/** + * Returns a function that returns true if the given + * pattern contains a `filepath` + * + * @param {String} `pattern` + * @return {Function} + */ + +utils.containsPattern = function(pattern, options) { + var unixify = utils.unixify(options); + options = options || {}; + + return function(filepath) { + var contains = utils._contains(filepath, unixify(filepath), pattern); + if (contains === true || options.nocase !== true) { + return contains; + } + var lower = filepath.toLowerCase(); + return utils._contains(lower, unixify(lower), pattern); + }; +}; + +/** + * Returns a function that returns true if the given + * regex matches the `filename` of a file path. 
+ * + * @param {RegExp} `re` Matching regex + * @return {Function} + */ + +utils.matchBasename = function(re) { + return function(filepath) { + return re.test(filepath) || re.test(path.basename(filepath)); + }; +}; + +/** + * Returns the given value unchanged. + * @return {any} + */ + +utils.identity = function(val) { + return val; +}; + +/** + * Determines the filepath to return based on the provided options. + * @return {any} + */ + +utils.value = function(str, unixify, options) { + if (options && options.unixify === false) { + return str; + } + if (options && typeof options.unixify === 'function') { + return options.unixify(str); + } + return unixify(str); +}; + +/** + * Returns a function that normalizes slashes in a string to forward + * slashes, strips `./` from beginning of paths, and optionally unescapes + * special characters. + * @return {Function} + */ + +utils.unixify = function(options) { + var opts = options || {}; + return function(filepath) { + if (opts.stripPrefix !== false) { + filepath = utils.stripPrefix(filepath); + } + if (opts.unescape === true) { + filepath = utils.unescape(filepath); + } + if (opts.unixify === true || utils.isWindows()) { + filepath = utils.toPosixPath(filepath); + } + return filepath; + }; +}; diff --git a/node_modules/nanomatch/node_modules/define-property/CHANGELOG.md b/node_modules/nanomatch/node_modules/define-property/CHANGELOG.md new file mode 100644 index 00000000..901c8aae --- /dev/null +++ b/node_modules/nanomatch/node_modules/define-property/CHANGELOG.md @@ -0,0 +1,82 @@ +# Release history + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+ Guiding Principles + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each version is displayed. +- Mention whether you follow Semantic Versioning. + +
+ +
+ Types of changes + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/)_): + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities. + +
+ +## [2.0.0] - 2017-04-20 + +### Changed + +- Now supports data descriptors in addition to accessor descriptors. +- Now uses [Reflect.defineProperty][reflect] when available, otherwise falls back to [Object.defineProperty][object]. + +## [1.0.0] - 2017-04-20 + +- stable release + +## [0.2.5] - 2015-08-31 + +- use is-descriptor + +## [0.2.3] - 2015-08-29 + +- check keys length + +## [0.2.2] - 2015-08-27 + +- ensure val is an object + +## [0.2.1] - 2015-08-27 + +- support functions + +## [0.2.0] - 2015-08-27 + +- support get/set +- update docs + +## [0.1.0] - 2015-08-12 + +- first commit + +[2.0.0]: https://github.com/jonschlinkert/define-property/compare/1.0.0...2.0.0 +[1.0.0]: https://github.com/jonschlinkert/define-property/compare/0.2.5...1.0.0 +[0.2.5]: https://github.com/jonschlinkert/define-property/compare/0.2.3...0.2.5 +[0.2.3]: https://github.com/jonschlinkert/define-property/compare/0.2.2...0.2.3 +[0.2.2]: https://github.com/jonschlinkert/define-property/compare/0.2.1...0.2.2 +[0.2.1]: https://github.com/jonschlinkert/define-property/compare/0.2.0...0.2.1 +[0.2.0]: https://github.com/jonschlinkert/define-property/compare/0.1.3...0.2.0 + +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog + +[object]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/defineProperty +[reflect]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Reflect/defineProperty diff --git a/node_modules/nanomatch/node_modules/define-property/LICENSE b/node_modules/nanomatch/node_modules/define-property/LICENSE new file mode 100644 index 00000000..f8de0630 --- /dev/null +++ b/node_modules/nanomatch/node_modules/define-property/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2018, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
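The 2.0.0 entry above notes that the module prefers `Reflect.defineProperty` and falls back to `Object.defineProperty`. A minimal sketch of that fallback pattern, for illustration only (the `defineFn`, `target` and `answer` names are invented, not taken from the package):

```js
// Illustrative sketch of the fallback noted in the 2.0.0 changelog entry:
// use Reflect.defineProperty when the runtime provides it, otherwise fall
// back to Object.defineProperty. Names here are placeholders.
var defineFn = (typeof Reflect !== 'undefined' && Reflect.defineProperty)
  ? Reflect.defineProperty
  : Object.defineProperty;

var target = {};
defineFn(target, 'answer', {
  configurable: true,
  enumerable: false,
  writable: true,
  value: 42
});

console.log(target.answer);       //=> 42
console.log(Object.keys(target)); //=> [] (the property is non-enumerable)
```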
diff --git a/node_modules/nanomatch/node_modules/define-property/README.md b/node_modules/nanomatch/node_modules/define-property/README.md new file mode 100644 index 00000000..f1ee8f92 --- /dev/null +++ b/node_modules/nanomatch/node_modules/define-property/README.md @@ -0,0 +1,117 @@ +# define-property [![NPM version](https://img.shields.io/npm/v/define-property.svg?style=flat)](https://www.npmjs.com/package/define-property) [![NPM monthly downloads](https://img.shields.io/npm/dm/define-property.svg?style=flat)](https://npmjs.org/package/define-property) [![NPM total downloads](https://img.shields.io/npm/dt/define-property.svg?style=flat)](https://npmjs.org/package/define-property) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/define-property.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/define-property) + +> Define a non-enumerable property on an object. Uses Reflect.defineProperty when available, otherwise Object.defineProperty. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save define-property +``` + +## Release history + +See [the CHANGELOG](changelog.md) for updates. + +## Usage + +**Params** + +* `object`: The object on which to define the property. +* `key`: The name of the property to be defined or modified. +* `value`: The value or descriptor of the property being defined or modified. + +```js +var define = require('define-property'); +var obj = {}; +define(obj, 'foo', function(val) { + return val.toUpperCase(); +}); + +// by default, defined properties are non-enumberable +console.log(obj); +//=> {} + +console.log(obj.foo('bar')); +//=> 'BAR' +``` + +**defining setters/getters** + +Pass the same properties you would if using [Object.defineProperty](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/defineProperty) or [Reflect.defineProperty](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Reflect/defineProperty). + +```js +define(obj, 'foo', { + set: function() {}, + get: function() {} +}); +``` + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [assign-deep](https://www.npmjs.com/package/assign-deep): Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target… [more](https://github.com/jonschlinkert/assign-deep) | [homepage](https://github.com/jonschlinkert/assign-deep "Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target (first) object.") +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow "Extend an object with the properties of additional objects. node.js/javascript util.") +* [merge-deep](https://www.npmjs.com/package/merge-deep): Recursively merge values in a javascript object. | [homepage](https://github.com/jonschlinkert/merge-deep "Recursively merge values in a javascript object.") +* [mixin-deep](https://www.npmjs.com/package/mixin-deep): Deeply mix the properties of objects into the first object. Like merge-deep, but doesn't clone. | [homepage](https://github.com/jonschlinkert/mixin-deep "Deeply mix the properties of objects into the first object. Like merge-deep, but doesn't clone.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 28 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [doowb](https://github.com/doowb) | + +### Author + +**Jon Schlinkert** + +* Connect with me on [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* Follow me on [github/jonschlinkert](https://github.com/jonschlinkert) +* Follow me on [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on January 25, 2018._ \ No newline at end of file diff --git a/node_modules/nanomatch/node_modules/define-property/index.js b/node_modules/nanomatch/node_modules/define-property/index.js new file mode 100644 index 00000000..0efa0a9e --- /dev/null +++ b/node_modules/nanomatch/node_modules/define-property/index.js @@ -0,0 +1,38 @@ +/*! + * define-property + * + * Copyright (c) 2015-2018, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +var isobject = require('isobject'); +var isDescriptor = require('is-descriptor'); +var define = (typeof Reflect !== 'undefined' && Reflect.defineProperty) + ? 
Reflect.defineProperty + : Object.defineProperty; + +module.exports = function defineProperty(obj, key, val) { + if (!isobject(obj) && typeof obj !== 'function' && !Array.isArray(obj)) { + throw new TypeError('expected an object, function, or array'); + } + + if (typeof key !== 'string') { + throw new TypeError('expected "key" to be a string'); + } + + if (isDescriptor(val)) { + define(obj, key, val); + return obj; + } + + define(obj, key, { + configurable: true, + enumerable: false, + writable: true, + value: val + }); + + return obj; +}; diff --git a/node_modules/nanomatch/node_modules/define-property/package.json b/node_modules/nanomatch/node_modules/define-property/package.json new file mode 100644 index 00000000..f8fd21cb --- /dev/null +++ b/node_modules/nanomatch/node_modules/define-property/package.json @@ -0,0 +1,67 @@ +{ + "name": "define-property", + "description": "Define a non-enumerable property on an object. Uses Reflect.defineProperty when available, otherwise Object.defineProperty.", + "version": "2.0.2", + "homepage": "https://github.com/jonschlinkert/define-property", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "jonschlinkert/define-property", + "bugs": { + "url": "https://github.com/jonschlinkert/define-property/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-descriptor": "^1.0.2", + "isobject": "^3.0.1" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "define", + "define-property", + "enumerable", + "key", + "non", + "non-enumerable", + "object", + "prop", + "property", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "assign-deep", + "extend-shallow", + "merge-deep", + "mixin-deep" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/nanomatch/node_modules/extend-shallow/LICENSE b/node_modules/nanomatch/node_modules/extend-shallow/LICENSE new file mode 100644 index 00000000..99c93691 --- /dev/null +++ b/node_modules/nanomatch/node_modules/extend-shallow/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, 2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/nanomatch/node_modules/extend-shallow/README.md b/node_modules/nanomatch/node_modules/extend-shallow/README.md new file mode 100644 index 00000000..dee226f4 --- /dev/null +++ b/node_modules/nanomatch/node_modules/extend-shallow/README.md @@ -0,0 +1,97 @@ +# extend-shallow [![NPM version](https://img.shields.io/npm/v/extend-shallow.svg?style=flat)](https://www.npmjs.com/package/extend-shallow) [![NPM monthly downloads](https://img.shields.io/npm/dm/extend-shallow.svg?style=flat)](https://npmjs.org/package/extend-shallow) [![NPM total downloads](https://img.shields.io/npm/dt/extend-shallow.svg?style=flat)](https://npmjs.org/package/extend-shallow) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/extend-shallow.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/extend-shallow) + +> Extend an object with the properties of additional objects. node.js/javascript util. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save extend-shallow +``` + +## Usage + +```js +var extend = require('extend-shallow'); + +extend({a: 'b'}, {c: 'd'}) +//=> {a: 'b', c: 'd'} +``` + +Pass an empty object to shallow clone: + +```js +var obj = {}; +extend(obj, {a: 'b'}, {c: 'd'}) +//=> {a: 'b', c: 'd'} +``` + +## About + +
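As a quick illustration of the shallow merge shown in the Usage section above, a hedged example (the `defaults` and `overrides` objects are invented): later sources win on duplicate keys, and nested objects are copied by reference rather than cloned.

```js
var extend = require('extend-shallow');

var defaults = { retries: 1, opts: { verbose: false } };
var overrides = { retries: 3 };

var merged = extend({}, defaults, overrides);
console.log(merged);
//=> { retries: 3, opts: { verbose: false } }

// The merge is shallow: nested objects are shared by reference, not cloned.
console.log(merged.opts === defaults.opts);
//=> true
```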
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow "Extend an object with the properties of additional objects. node.js/javascript util.") +* [for-in](https://www.npmjs.com/package/for-in): Iterate over the own and inherited enumerable properties of an object, and return an object… [more](https://github.com/jonschlinkert/for-in) | [homepage](https://github.com/jonschlinkert/for-in "Iterate over the own and inherited enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js") +* [for-own](https://www.npmjs.com/package/for-own): Iterate over the own enumerable properties of an object, and return an object with properties… [more](https://github.com/jonschlinkert/for-own) | [homepage](https://github.com/jonschlinkert/for-own "Iterate over the own enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 33 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [pdehaan](https://github.com/pdehaan) | + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). 
+ +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on November 19, 2017._ \ No newline at end of file diff --git a/node_modules/nanomatch/node_modules/extend-shallow/index.js b/node_modules/nanomatch/node_modules/extend-shallow/index.js new file mode 100644 index 00000000..c9582f8f --- /dev/null +++ b/node_modules/nanomatch/node_modules/extend-shallow/index.js @@ -0,0 +1,60 @@ +'use strict'; + +var isExtendable = require('is-extendable'); +var assignSymbols = require('assign-symbols'); + +module.exports = Object.assign || function(obj/*, objects*/) { + if (obj === null || typeof obj === 'undefined') { + throw new TypeError('Cannot convert undefined or null to object'); + } + if (!isObject(obj)) { + obj = {}; + } + for (var i = 1; i < arguments.length; i++) { + var val = arguments[i]; + if (isString(val)) { + val = toObject(val); + } + if (isObject(val)) { + assign(obj, val); + assignSymbols(obj, val); + } + } + return obj; +}; + +function assign(a, b) { + for (var key in b) { + if (hasOwn(b, key)) { + a[key] = b[key]; + } + } +} + +function isString(val) { + return (val && typeof val === 'string'); +} + +function toObject(str) { + var obj = {}; + for (var i in str) { + obj[i] = str[i]; + } + return obj; +} + +function isObject(val) { + return (val && typeof val === 'object') || isExtendable(val); +} + +/** + * Returns true if the given `key` is an own property of `obj`. + */ + +function hasOwn(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +function isEnum(obj, key) { + return Object.prototype.propertyIsEnumerable.call(obj, key); +} diff --git a/node_modules/nanomatch/node_modules/extend-shallow/package.json b/node_modules/nanomatch/node_modules/extend-shallow/package.json new file mode 100644 index 00000000..e5e91053 --- /dev/null +++ b/node_modules/nanomatch/node_modules/extend-shallow/package.json @@ -0,0 +1,83 @@ +{ + "name": "extend-shallow", + "description": "Extend an object with the properties of additional objects. 
node.js/javascript util.", + "version": "3.0.2", + "homepage": "https://github.com/jonschlinkert/extend-shallow", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Peter deHaan (http://about.me/peterdehaan)" + ], + "repository": "jonschlinkert/extend-shallow", + "bugs": { + "url": "https://github.com/jonschlinkert/extend-shallow/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "devDependencies": { + "array-slice": "^1.0.0", + "benchmarked": "^2.0.0", + "for-own": "^1.0.0", + "gulp-format-md": "^1.0.0", + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.1", + "minimist": "^1.2.0", + "mocha": "^3.5.3", + "object-assign": "^4.1.1" + }, + "keywords": [ + "assign", + "clone", + "extend", + "merge", + "obj", + "object", + "object-assign", + "object.assign", + "prop", + "properties", + "property", + "props", + "shallow", + "util", + "utility", + "utils", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "related": { + "list": [ + "extend-shallow", + "for-in", + "for-own", + "is-plain-object", + "isobject", + "kind-of" + ] + }, + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/nanomatch/node_modules/is-extendable/LICENSE b/node_modules/nanomatch/node_modules/is-extendable/LICENSE new file mode 100644 index 00000000..c0d7f136 --- /dev/null +++ b/node_modules/nanomatch/node_modules/is-extendable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/nanomatch/node_modules/is-extendable/README.md b/node_modules/nanomatch/node_modules/is-extendable/README.md new file mode 100644 index 00000000..875b56a7 --- /dev/null +++ b/node_modules/nanomatch/node_modules/is-extendable/README.md @@ -0,0 +1,88 @@ +# is-extendable [![NPM version](https://img.shields.io/npm/v/is-extendable.svg?style=flat)](https://www.npmjs.com/package/is-extendable) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![NPM total downloads](https://img.shields.io/npm/dt/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-extendable.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-extendable) + +> Returns true if a value is a plain object, array or function. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-extendable +``` + +## Usage + +```js +var isExtendable = require('is-extendable'); +``` + +Returns true if the value is any of the following: + +* array +* plain object +* function + +## Notes + +All objects in JavaScript can have keys, but it's a pain to check for this, since we ether need to verify that the value is not `null` or `undefined` and: + +* the value is not a primitive, or +* that the object is a plain object, function or array + +Also note that an `extendable` object is not the same as an [extensible object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/isExtensible), which is one that (in es6) is not sealed, frozen, or marked as non-extensible using `preventExtensions`. + +## Release history + +### v1.0.0 - 2017/07/20 + +**Breaking changes** + +* No longer considers date, regex or error objects to be extendable + +## About + +### Related projects + +* [assign-deep](https://www.npmjs.com/package/assign-deep): Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target… [more](https://github.com/jonschlinkert/assign-deep) | [homepage](https://github.com/jonschlinkert/assign-deep "Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target (first) object.") +* [is-equal-shallow](https://www.npmjs.com/package/is-equal-shallow): Does a shallow comparison of two objects, returning false if the keys or values differ. | [homepage](https://github.com/jonschlinkert/is-equal-shallow "Does a shallow comparison of two objects, returning false if the keys or values differ.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). 
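A short recap of the behaviour described in the Usage and Notes sections above, as a hedged sketch (the sample values are arbitrary):

```js
var isExtendable = require('is-extendable');

isExtendable({});             //=> true  (plain object)
isExtendable([]);             //=> true  (array)
isExtendable(function() {});  //=> true  (function)

isExtendable('foo');          //=> false (primitives cannot be extended)
isExtendable(null);           //=> false
isExtendable(new Date());     //=> false (since 1.0.0, per the release history above)
```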
+ +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on July 20, 2017._ \ No newline at end of file diff --git a/node_modules/nanomatch/node_modules/is-extendable/index.d.ts b/node_modules/nanomatch/node_modules/is-extendable/index.d.ts new file mode 100644 index 00000000..b96d5075 --- /dev/null +++ b/node_modules/nanomatch/node_modules/is-extendable/index.d.ts @@ -0,0 +1,5 @@ +export = isExtendable; + +declare function isExtendable(val: any): boolean; + +declare namespace isExtendable {} diff --git a/node_modules/nanomatch/node_modules/is-extendable/index.js b/node_modules/nanomatch/node_modules/is-extendable/index.js new file mode 100644 index 00000000..a8b26ad0 --- /dev/null +++ b/node_modules/nanomatch/node_modules/is-extendable/index.js @@ -0,0 +1,14 @@ +/*! + * is-extendable + * + * Copyright (c) 2015-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +var isPlainObject = require('is-plain-object'); + +module.exports = function isExtendable(val) { + return isPlainObject(val) || typeof val === 'function' || Array.isArray(val); +}; diff --git a/node_modules/nanomatch/node_modules/is-extendable/package.json b/node_modules/nanomatch/node_modules/is-extendable/package.json new file mode 100644 index 00000000..2aaab65a --- /dev/null +++ b/node_modules/nanomatch/node_modules/is-extendable/package.json @@ -0,0 +1,67 @@ +{ + "name": "is-extendable", + "description": "Returns true if a value is a plain object, array or function.", + "version": "1.0.1", + "homepage": "https://github.com/jonschlinkert/is-extendable", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extendable", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extendable/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "index.d.ts" + ], + "main": "index.js", + "types": "index.d.ts", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.4.2" + }, + "keywords": [ + "array", + "assign", + "check", + "date", + "extend", + "extendable", + "extensible", + "function", + "is", + "object", + "regex", + "test" + ], + "verb": { + "related": { + "list": [ + "assign-deep", + "is-equal-shallow", + "is-plain-object", + "isobject", + "kind-of" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/nanomatch/node_modules/kind-of/CHANGELOG.md b/node_modules/nanomatch/node_modules/kind-of/CHANGELOG.md new file mode 100644 index 00000000..fb30b06d --- /dev/null +++ b/node_modules/nanomatch/node_modules/kind-of/CHANGELOG.md @@ -0,0 +1,157 @@ +# Release history + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+ Guiding Principles + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each version is displayed. +- Mention whether you follow Semantic Versioning. + +
+ +
+ Types of changes + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/)_): + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities. + +
+ +## [6.0.0] - 2017-10-13 + +- refactor code to be more performant +- refactor benchmarks + +## [5.1.0] - 2017-10-13 + +**Added** + +- Merge pull request #15 from aretecode/patch-1 +- adds support and tests for string & array iterators + +**Changed** + +- updates benchmarks + +## [5.0.2] - 2017-08-02 + +- Merge pull request #14 from struct78/master +- Added `undefined` check + +## [5.0.0] - 2017-06-21 + +- Merge pull request #12 from aretecode/iterator +- Set Iterator + Map Iterator +- streamline `isbuffer`, minor edits + +## [4.0.0] - 2017-05-19 + +- Merge pull request #8 from tunnckoCore/master +- update deps + +## [3.2.2] - 2017-05-16 + +- fix version + +## [3.2.1] - 2017-05-16 + +- add browserify + +## [3.2.0] - 2017-04-25 + +- Merge pull request #10 from ksheedlo/unrequire-buffer +- add `promise` support and tests +- Remove unnecessary `Buffer` check + +## [3.1.0] - 2016-12-07 + +- Merge pull request #7 from laggingreflex/err +- add support for `error` and tests +- run update + +## [3.0.4] - 2016-07-29 + +- move tests +- run update + +## [3.0.3] - 2016-05-03 + +- fix prepublish script +- remove unused dep + +## [3.0.0] - 2015-11-17 + +- add typed array support +- Merge pull request #5 from miguelmota/typed-arrays +- adds new tests + +## [2.0.1] - 2015-08-21 + +- use `is-buffer` module + +## [2.0.0] - 2015-05-31 + +- Create fallback for `Array.isArray` if used as a browser package +- Merge pull request #2 from dtothefp/patch-1 +- Merge pull request #3 from pdehaan/patch-1 +- Merge branch 'master' of https://github.com/chorks/kind-of into chorks-master +- optimizations, mostly date and regex + +## [1.1.0] - 2015-02-09 + +- adds `buffer` support +- adds tests for `buffer` + +## [1.0.0] - 2015-01-19 + +- update benchmarks +- optimizations based on benchmarks + +## [0.1.2] - 2014-10-26 + +- return `typeof` value if it's not an object. 
very slight speed improvement +- use `.slice` +- adds benchmarks + +## [0.1.0] - 2014-9-26 + +- first commit + +[6.0.0]: https://github.com/jonschlinkert/kind-of/compare/5.1.0...6.0.0 +[5.1.0]: https://github.com/jonschlinkert/kind-of/compare/5.0.2...5.1.0 +[5.0.2]: https://github.com/jonschlinkert/kind-of/compare/5.0.1...5.0.2 +[5.0.1]: https://github.com/jonschlinkert/kind-of/compare/5.0.0...5.0.1 +[5.0.0]: https://github.com/jonschlinkert/kind-of/compare/4.0.0...5.0.0 +[4.0.0]: https://github.com/jonschlinkert/kind-of/compare/3.2.2...4.0.0 +[3.2.2]: https://github.com/jonschlinkert/kind-of/compare/3.2.1...3.2.2 +[3.2.1]: https://github.com/jonschlinkert/kind-of/compare/3.2.0...3.2.1 +[3.2.0]: https://github.com/jonschlinkert/kind-of/compare/3.1.0...3.2.0 +[3.1.0]: https://github.com/jonschlinkert/kind-of/compare/3.0.4...3.1.0 +[3.0.4]: https://github.com/jonschlinkert/kind-of/compare/3.0.3...3.0.4 +[3.0.3]: https://github.com/jonschlinkert/kind-of/compare/3.0.0...3.0.3 +[3.0.0]: https://github.com/jonschlinkert/kind-of/compare/2.0.1...3.0.0 +[2.0.1]: https://github.com/jonschlinkert/kind-of/compare/2.0.0...2.0.1 +[2.0.0]: https://github.com/jonschlinkert/kind-of/compare/1.1.0...2.0.0 +[1.1.0]: https://github.com/jonschlinkert/kind-of/compare/1.0.0...1.1.0 +[1.0.0]: https://github.com/jonschlinkert/kind-of/compare/0.1.2...1.0.0 +[0.1.2]: https://github.com/jonschlinkert/kind-of/compare/0.1.0...0.1.2 +[0.1.0]: https://github.com/jonschlinkert/kind-of/commit/2fae09b0b19b1aadb558e9be39f0c3ef6034eb87 + +[Unreleased]: https://github.com/jonschlinkert/kind-of/compare/0.1.2...HEAD +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog + diff --git a/node_modules/nanomatch/node_modules/kind-of/LICENSE b/node_modules/nanomatch/node_modules/kind-of/LICENSE new file mode 100644 index 00000000..3f2eca18 --- /dev/null +++ b/node_modules/nanomatch/node_modules/kind-of/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/nanomatch/node_modules/kind-of/README.md b/node_modules/nanomatch/node_modules/kind-of/README.md new file mode 100644 index 00000000..4b0d4a81 --- /dev/null +++ b/node_modules/nanomatch/node_modules/kind-of/README.md @@ -0,0 +1,365 @@ +# kind-of [![NPM version](https://img.shields.io/npm/v/kind-of.svg?style=flat)](https://www.npmjs.com/package/kind-of) [![NPM monthly downloads](https://img.shields.io/npm/dm/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![NPM total downloads](https://img.shields.io/npm/dt/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/kind-of.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/kind-of) + +> Get the native type of a value. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save kind-of +``` + +Install with [bower](https://bower.io/) + +```sh +$ bower install kind-of --save +``` + +## Why use this? + +1. [it's fast](#benchmarks) | [optimizations](#optimizations) +2. [better type checking](#better-type-checking) + +## Usage + +> es5, es6, and browser ready + +```js +var kindOf = require('kind-of'); + +kindOf(undefined); +//=> 'undefined' + +kindOf(null); +//=> 'null' + +kindOf(true); +//=> 'boolean' + +kindOf(false); +//=> 'boolean' + +kindOf(new Buffer('')); +//=> 'buffer' + +kindOf(42); +//=> 'number' + +kindOf('str'); +//=> 'string' + +kindOf(arguments); +//=> 'arguments' + +kindOf({}); +//=> 'object' + +kindOf(Object.create(null)); +//=> 'object' + +kindOf(new Test()); +//=> 'object' + +kindOf(new Date()); +//=> 'date' + +kindOf([1, 2, 3]); +//=> 'array' + +kindOf(/foo/); +//=> 'regexp' + +kindOf(new RegExp('foo')); +//=> 'regexp' + +kindOf(new Error('error')); +//=> 'error' + +kindOf(function () {}); +//=> 'function' + +kindOf(function * () {}); +//=> 'generatorfunction' + +kindOf(Symbol('str')); +//=> 'symbol' + +kindOf(new Map()); +//=> 'map' + +kindOf(new WeakMap()); +//=> 'weakmap' + +kindOf(new Set()); +//=> 'set' + +kindOf(new WeakSet()); +//=> 'weakset' + +kindOf(new Int8Array()); +//=> 'int8array' + +kindOf(new Uint8Array()); +//=> 'uint8array' + +kindOf(new Uint8ClampedArray()); +//=> 'uint8clampedarray' + +kindOf(new Int16Array()); +//=> 'int16array' + +kindOf(new Uint16Array()); +//=> 'uint16array' + +kindOf(new Int32Array()); +//=> 'int32array' + +kindOf(new Uint32Array()); +//=> 'uint32array' + +kindOf(new Float32Array()); +//=> 'float32array' + +kindOf(new Float64Array()); +//=> 'float64array' +``` + +## Benchmarks + +Benchmarked against [typeof](http://github.com/CodingFu/typeof) and [type-of](https://github.com/ForbesLindesay/type-of). 
+ +```bash +# arguments (32 bytes) + kind-of x 17,024,098 ops/sec ±1.90% (86 runs sampled) + lib-type-of x 11,926,235 ops/sec ±1.34% (83 runs sampled) + lib-typeof x 9,245,257 ops/sec ±1.22% (87 runs sampled) + + fastest is kind-of (by 161% avg) + +# array (22 bytes) + kind-of x 17,196,492 ops/sec ±1.07% (88 runs sampled) + lib-type-of x 8,838,283 ops/sec ±1.02% (87 runs sampled) + lib-typeof x 8,677,848 ops/sec ±0.87% (87 runs sampled) + + fastest is kind-of (by 196% avg) + +# boolean (24 bytes) + kind-of x 16,841,600 ops/sec ±1.10% (86 runs sampled) + lib-type-of x 8,096,787 ops/sec ±0.95% (87 runs sampled) + lib-typeof x 8,423,345 ops/sec ±1.15% (86 runs sampled) + + fastest is kind-of (by 204% avg) + +# buffer (38 bytes) + kind-of x 14,848,060 ops/sec ±1.05% (86 runs sampled) + lib-type-of x 3,671,577 ops/sec ±1.49% (87 runs sampled) + lib-typeof x 8,360,236 ops/sec ±1.24% (86 runs sampled) + + fastest is kind-of (by 247% avg) + +# date (30 bytes) + kind-of x 16,067,761 ops/sec ±1.58% (86 runs sampled) + lib-type-of x 8,954,436 ops/sec ±1.40% (87 runs sampled) + lib-typeof x 8,488,307 ops/sec ±1.51% (84 runs sampled) + + fastest is kind-of (by 184% avg) + +# error (36 bytes) + kind-of x 9,634,090 ops/sec ±1.12% (89 runs sampled) + lib-type-of x 7,735,624 ops/sec ±1.32% (86 runs sampled) + lib-typeof x 7,442,160 ops/sec ±1.11% (90 runs sampled) + + fastest is kind-of (by 127% avg) + +# function (34 bytes) + kind-of x 10,031,494 ops/sec ±1.27% (86 runs sampled) + lib-type-of x 9,502,757 ops/sec ±1.17% (89 runs sampled) + lib-typeof x 8,278,985 ops/sec ±1.08% (88 runs sampled) + + fastest is kind-of (by 113% avg) + +# null (24 bytes) + kind-of x 18,159,808 ops/sec ±1.92% (86 runs sampled) + lib-type-of x 12,927,635 ops/sec ±1.01% (88 runs sampled) + lib-typeof x 7,958,234 ops/sec ±1.21% (89 runs sampled) + + fastest is kind-of (by 174% avg) + +# number (22 bytes) + kind-of x 17,846,779 ops/sec ±0.91% (85 runs sampled) + lib-type-of x 3,316,636 ops/sec ±1.19% (86 runs sampled) + lib-typeof x 2,329,477 ops/sec ±2.21% (85 runs sampled) + + fastest is kind-of (by 632% avg) + +# object-plain (47 bytes) + kind-of x 7,085,155 ops/sec ±1.05% (88 runs sampled) + lib-type-of x 8,870,930 ops/sec ±1.06% (83 runs sampled) + lib-typeof x 8,716,024 ops/sec ±1.05% (87 runs sampled) + + fastest is lib-type-of (by 112% avg) + +# regex (25 bytes) + kind-of x 14,196,052 ops/sec ±1.65% (84 runs sampled) + lib-type-of x 9,554,164 ops/sec ±1.25% (88 runs sampled) + lib-typeof x 8,359,691 ops/sec ±1.07% (87 runs sampled) + + fastest is kind-of (by 158% avg) + +# string (33 bytes) + kind-of x 16,131,428 ops/sec ±1.41% (85 runs sampled) + lib-type-of x 7,273,172 ops/sec ±1.05% (87 runs sampled) + lib-typeof x 7,382,635 ops/sec ±1.17% (85 runs sampled) + + fastest is kind-of (by 220% avg) + +# symbol (34 bytes) + kind-of x 17,011,537 ops/sec ±1.24% (86 runs sampled) + lib-type-of x 3,492,454 ops/sec ±1.23% (89 runs sampled) + lib-typeof x 7,471,235 ops/sec ±2.48% (87 runs sampled) + + fastest is kind-of (by 310% avg) + +# template-strings (36 bytes) + kind-of x 15,434,250 ops/sec ±1.46% (83 runs sampled) + lib-type-of x 7,157,907 ops/sec ±0.97% (87 runs sampled) + lib-typeof x 7,517,986 ops/sec ±0.92% (86 runs sampled) + + fastest is kind-of (by 210% avg) + +# undefined (29 bytes) + kind-of x 19,167,115 ops/sec ±1.71% (87 runs sampled) + lib-type-of x 15,477,740 ops/sec ±1.63% (85 runs sampled) + lib-typeof x 19,075,495 ops/sec ±1.17% (83 runs sampled) + + fastest is lib-typeof,kind-of + +``` + +## Optimizations + 
+In 7 out of 8 cases, this library is 2x-10x faster than other top libraries included in the benchmarks. There are a few things that lead to this performance advantage, none of them hard and fast rules, but all of them simple and repeatable in almost any code library: + +1. Optimize around the fastest and most common use cases first. Of course, this will change from project-to-project, but I took some time to understand how and why `typeof` checks were being used in my own libraries and other libraries I use a lot. +2. Optimize around bottlenecks - In other words, the order in which conditionals are implemented is significant, because each check is only as fast as the failing checks that came before it. Here, the biggest bottleneck by far is checking for plain objects (an object that was created by the `Object` constructor). I opted to make this check happen by process of elimination rather than brute force up front (e.g. by using something like `val.constructor.name`), so that every other type check would not be penalized it. +3. Don't do uneccessary processing - why do `.slice(8, -1).toLowerCase();` just to get the word `regex`? It's much faster to do `if (type === '[object RegExp]') return 'regex'` +4. There is no reason to make the code in a microlib as terse as possible, just to win points for making it shorter. It's always better to favor performant code over terse code. You will always only be using a single `require()` statement to use the library anyway, regardless of how the code is written. + +## Better type checking + +kind-of seems to be more consistently "correct" than other type checking libs I've looked at. For example, here are some differing results from other popular libs: + +### [typeof](https://github.com/CodingFu/typeof) lib + +Incorrectly identifies instances of custom constructors (pretty common): + +```js +var typeOf = require('typeof'); +function Test() {} +console.log(typeOf(new Test())); +//=> 'test' +``` + +Returns `object` instead of `arguments`: + +```js +function foo() { + console.log(typeOf(arguments)) //=> 'object' +} +foo(); +``` + +### [type-of](https://github.com/ForbesLindesay/type-of) lib + +Incorrectly returns `object` for generator functions, buffers, `Map`, `Set`, `WeakMap` and `WeakSet`: + +```js +function * foo() {} +console.log(typeOf(foo)); +//=> 'object' +console.log(typeOf(new Buffer(''))); +//=> 'object' +console.log(typeOf(new Map())); +//=> 'object' +console.log(typeOf(new Set())); +//=> 'object' +console.log(typeOf(new WeakMap())); +//=> 'object' +console.log(typeOf(new WeakSet())); +//=> 'object' +``` + +## About + +
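The comparison above shows where `typeof`/`type-of` style checks go wrong; as a small sketch, assuming the same `kind-of` API shown in the Usage section, here are the same kinds of inputs run through `kindOf` (the values are invented for illustration):

```js
var kindOf = require('kind-of');

// Custom constructor instances, arguments objects, generator functions and
// ES2015 collections all come back with a sensible type name.
function Test() {}
console.log(kindOf(new Test()));       //=> 'object' (not 'test')

(function() {
  console.log(kindOf(arguments));      //=> 'arguments' (not 'object')
})();

console.log(kindOf(function * () {})); //=> 'generatorfunction'
console.log(kindOf(new Map()));        //=> 'map'
console.log(kindOf(new WeakSet()));    //=> 'weakset'
```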
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
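If you want to poke at the vendored copy directly, a minimal mocha-style spec along these lines would exercise the behaviour compared in the sections above (a sketch, not part of the upstream test suite; the expected values follow from the checks in the bundled `index.js`):

```js
var assert = require('assert');
var kindOf = require('kind-of');

// Run with `mocha`, the test runner declared in the package.json.
describe('kind-of (smoke test)', function() {
  it('identifies the types other libs report as plain objects', function() {
    assert.strictEqual(kindOf(new Map()), 'map');
    assert.strictEqual(kindOf(new Set()), 'set');
    assert.strictEqual(kindOf(Buffer.from('')), 'buffer');
    assert.strictEqual(kindOf(/abc/), 'regexp');
    assert.strictEqual(kindOf(function * () {}), 'generatorfunction');
  });
});
```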
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [is-number](https://www.npmjs.com/package/is-number): Returns true if the value is a number. comprehensive tests. | [homepage](https://github.com/jonschlinkert/is-number "Returns true if the value is a number. comprehensive tests.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 98 | [jonschlinkert](https://github.com/jonschlinkert) | +| 3 | [aretecode](https://github.com/aretecode) | +| 2 | [miguelmota](https://github.com/miguelmota) | +| 1 | [dtothefp](https://github.com/dtothefp) | +| 1 | [ianstormtaylor](https://github.com/ianstormtaylor) | +| 1 | [ksheedlo](https://github.com/ksheedlo) | +| 1 | [pdehaan](https://github.com/pdehaan) | +| 1 | [laggingreflex](https://github.com/laggingreflex) | +| 1 | [charlike-old](https://github.com/charlike-old) | + +### Author + +**Jon Schlinkert** + +* [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on December 01, 2017._ \ No newline at end of file diff --git a/node_modules/nanomatch/node_modules/kind-of/index.js b/node_modules/nanomatch/node_modules/kind-of/index.js new file mode 100644 index 00000000..aa2bb394 --- /dev/null +++ b/node_modules/nanomatch/node_modules/kind-of/index.js @@ -0,0 +1,129 @@ +var toString = Object.prototype.toString; + +module.exports = function kindOf(val) { + if (val === void 0) return 'undefined'; + if (val === null) return 'null'; + + var type = typeof val; + if (type === 'boolean') return 'boolean'; + if (type === 'string') return 'string'; + if (type === 'number') return 'number'; + if (type === 'symbol') return 'symbol'; + if (type === 'function') { + return isGeneratorFn(val) ? 
'generatorfunction' : 'function'; + } + + if (isArray(val)) return 'array'; + if (isBuffer(val)) return 'buffer'; + if (isArguments(val)) return 'arguments'; + if (isDate(val)) return 'date'; + if (isError(val)) return 'error'; + if (isRegexp(val)) return 'regexp'; + + switch (ctorName(val)) { + case 'Symbol': return 'symbol'; + case 'Promise': return 'promise'; + + // Set, Map, WeakSet, WeakMap + case 'WeakMap': return 'weakmap'; + case 'WeakSet': return 'weakset'; + case 'Map': return 'map'; + case 'Set': return 'set'; + + // 8-bit typed arrays + case 'Int8Array': return 'int8array'; + case 'Uint8Array': return 'uint8array'; + case 'Uint8ClampedArray': return 'uint8clampedarray'; + + // 16-bit typed arrays + case 'Int16Array': return 'int16array'; + case 'Uint16Array': return 'uint16array'; + + // 32-bit typed arrays + case 'Int32Array': return 'int32array'; + case 'Uint32Array': return 'uint32array'; + case 'Float32Array': return 'float32array'; + case 'Float64Array': return 'float64array'; + } + + if (isGeneratorObj(val)) { + return 'generator'; + } + + // Non-plain objects + type = toString.call(val); + switch (type) { + case '[object Object]': return 'object'; + // iterators + case '[object Map Iterator]': return 'mapiterator'; + case '[object Set Iterator]': return 'setiterator'; + case '[object String Iterator]': return 'stringiterator'; + case '[object Array Iterator]': return 'arrayiterator'; + } + + // other + return type.slice(8, -1).toLowerCase().replace(/\s/g, ''); +}; + +function ctorName(val) { + return val.constructor ? val.constructor.name : null; +} + +function isArray(val) { + if (Array.isArray) return Array.isArray(val); + return val instanceof Array; +} + +function isError(val) { + return val instanceof Error || (typeof val.message === 'string' && val.constructor && typeof val.constructor.stackTraceLimit === 'number'); +} + +function isDate(val) { + if (val instanceof Date) return true; + return typeof val.toDateString === 'function' + && typeof val.getDate === 'function' + && typeof val.setDate === 'function'; +} + +function isRegexp(val) { + if (val instanceof RegExp) return true; + return typeof val.flags === 'string' + && typeof val.ignoreCase === 'boolean' + && typeof val.multiline === 'boolean' + && typeof val.global === 'boolean'; +} + +function isGeneratorFn(name, val) { + return ctorName(name) === 'GeneratorFunction'; +} + +function isGeneratorObj(val) { + return typeof val.throw === 'function' + && typeof val.return === 'function' + && typeof val.next === 'function'; +} + +function isArguments(val) { + try { + if (typeof val.length === 'number' && typeof val.callee === 'function') { + return true; + } + } catch (err) { + if (err.message.indexOf('callee') !== -1) { + return true; + } + } + return false; +} + +/** + * If you need to support Safari 5-7 (8-10 yr-old browser), + * take a look at https://github.com/feross/is-buffer + */ + +function isBuffer(val) { + if (val.constructor && typeof val.constructor.isBuffer === 'function') { + return val.constructor.isBuffer(val); + } + return false; +} diff --git a/node_modules/nanomatch/node_modules/kind-of/package.json b/node_modules/nanomatch/node_modules/kind-of/package.json new file mode 100644 index 00000000..73d70aee --- /dev/null +++ b/node_modules/nanomatch/node_modules/kind-of/package.json @@ -0,0 +1,88 @@ +{ + "name": "kind-of", + "description": "Get the native type of a value.", + "version": "6.0.2", + "homepage": "https://github.com/jonschlinkert/kind-of", + "author": "Jon Schlinkert 
(https://github.com/jonschlinkert)", + "contributors": [ + "David Fox-Powell (https://dtothefp.github.io/me)", + "James (https://twitter.com/aretecode)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Ken Sheedlo (kensheedlo.com)", + "laggingreflex (https://github.com/laggingreflex)", + "Miguel Mota (https://miguelmota.com)", + "Peter deHaan (http://about.me/peterdehaan)", + "tunnckoCore (https://i.am.charlike.online)" + ], + "repository": "jonschlinkert/kind-of", + "bugs": { + "url": "https://github.com/jonschlinkert/kind-of/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha", + "prepublish": "browserify -o browser.js -e index.js -s index --bare" + }, + "devDependencies": { + "benchmarked": "^2.0.0", + "browserify": "^14.4.0", + "gulp-format-md": "^1.0.0", + "mocha": "^4.0.1", + "write": "^1.0.3" + }, + "keywords": [ + "arguments", + "array", + "boolean", + "check", + "date", + "function", + "is", + "is-type", + "is-type-of", + "kind", + "kind-of", + "number", + "object", + "of", + "regexp", + "string", + "test", + "type", + "type-of", + "typeof", + "types" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "is-glob", + "is-number", + "is-primitive" + ] + }, + "reflinks": [ + "type-of", + "typeof", + "verb" + ] + } +} diff --git a/node_modules/nanomatch/package.json b/node_modules/nanomatch/package.json new file mode 100644 index 00000000..0a85f0aa --- /dev/null +++ b/node_modules/nanomatch/package.json @@ -0,0 +1,134 @@ +{ + "name": "nanomatch", + "description": "Fast, minimal glob matcher for node.js. 
Similar to micromatch, minimatch and multimatch, but complete Bash 4.3 wildcard support only (no support for exglobs, posix brackets or braces)", + "version": "1.2.13", + "homepage": "https://github.com/micromatch/nanomatch", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Devon Govett (http://badassjs.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/nanomatch", + "bugs": { + "url": "https://github.com/micromatch/nanomatch/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "lib" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "fragment-cache": "^0.2.1", + "is-windows": "^1.0.2", + "kind-of": "^6.0.2", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "devDependencies": { + "bash-match": "^1.0.2", + "for-own": "^1.0.0", + "gulp": "^3.9.1", + "gulp-format-md": "^1.0.0", + "gulp-istanbul": "^1.1.3", + "gulp-mocha": "^5.0.0", + "helper-changelog": "^0.3.0", + "minimatch": "^3.0.4", + "minimist": "^1.2.0", + "mocha": "^3.5.3", + "multimatch": "^2.1.0" + }, + "keywords": [ + "bash", + "expand", + "expansion", + "expression", + "file", + "files", + "filter", + "find", + "glob", + "globbing", + "globs", + "globstar", + "match", + "matcher", + "matches", + "matching", + "micromatch", + "minimatch", + "multimatch", + "nanomatch", + "path", + "pattern", + "patterns", + "regex", + "regexp", + "regular", + "shell", + "wildcard" + ], + "lintDeps": { + "dependencies": { + "options": { + "lock": { + "snapdragon": "^0.8.1" + } + } + }, + "devDependencies": { + "files": { + "options": { + "ignore": [ + "benchmark/**" + ] + } + } + } + }, + "verb": { + "toc": "collapsible", + "layout": "default", + "tasks": [ + "readme" + ], + "helpers": [ + "helper-changelog" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "extglob", + "is-extglob", + "is-glob", + "micromatch" + ] + }, + "reflinks": [ + "expand-brackets", + "expand-tilde", + "glob-object", + "micromatch", + "minimatch", + "options", + "snapdragon" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/needle/README.md b/node_modules/needle/README.md new file mode 100644 index 00000000..149bacc4 --- /dev/null +++ b/node_modules/needle/README.md @@ -0,0 +1,593 @@ +Needle +====== + +[![NPM](https://nodei.co/npm/needle.png)](https://nodei.co/npm/needle/) + +The leanest and most handsome HTTP client in the Nodelands. + +```js +var needle = require('needle'); + +needle.get('http://www.google.com', function(error, response) { + if (!error && response.statusCode == 200) + console.log(response.body); +}); +``` + +Callbacks not floating your boat? Needle got your back. + +``` js +var data = { + file: '/home/johnlennon/walrus.png', + content_type: 'image/png' +}; + +// the callback is optional, and needle returns a `readableStream` object +// that triggers a 'done' event when the request/response process is complete. +needle + .post('https://my.server.com/foo', data, { multipart: true }) + .on('readable', function() { /* eat your chunks */ }) + .on('done', function(err, resp) { + console.log('Ready-o!'); + }) +``` + +From version 2.0.x up, Promises are also supported. Just call `needle()` directly and you'll get a native Promise object. 
+ +```js +needle('put', 'https://hacking.the.gibson/login', { password: 'god' }, { json: true }) + .then(function(response) { + return doSomethingWith(response) + }) + .catch(function(err) { + console.log('Call the locksmith!') + }) +``` + +With only two real dependencies, Needle supports: + + - HTTP/HTTPS requests, with the usual verbs you would expect + - All of Node's native TLS options, such as 'rejectUnauthorized' (see below) + - Basic & Digest authentication with auto-detection + - Multipart form-data (e.g. file uploads) + - HTTP Proxy forwarding, optionally with authentication + - Streaming gzip or deflate decompression + - Automatic XML & JSON parsing + - 301/302/303 redirect following, with fine-grained tuning, and + - Streaming non-UTF-8 charset decoding, via `iconv-lite` + +And yes, Mr. Wayne, it does come in black. + +This makes Needle an ideal alternative for performing quick HTTP requests in Node, either for API interaction, downloading or uploading streams of data, and so on. If you need OAuth, AWS support or anything fancier, you should check out mikeal's request module. + +Install +------- + +``` +$ npm install needle +``` + +Usage +----- + +```js +// using promises +needle('get', 'https://server.com/posts/12') + .then(function(resp) { + // ... + }) + .catch(function(err) { + // ... + }); + +// with callback +needle.get('ifconfig.me/all.json', function(error, response, body) { + if (error) throw error; + + // body is an alias for `response.body`, + // that in this case holds a JSON-decoded object. + console.log(body.ip_addr); +}); + +// no callback, using streams +var out = fs.createWriteStream('logo.png'); +needle.get('https://google.com/images/logo.png').pipe(out).on('finish', function() { + console.log('Pipe finished!'); +}); +``` + +As you can see, you can use Needle with Promises or without them. When using Promises or when a callback is passed, the response's body will be buffered and written to `response.body`, and the callback will be fired when all of the data has been collected and processed (e.g. decompressed, decoded and/or parsed). + +When no callback is passed, however, the buffering logic will be skipped but the response stream will still go through Needle's processing pipeline, so you get all the benefits of post-processing while keeping the streamishness we all love from Node. + +Response pipeline +----------------- + +Depending on the response's Content-Type, Needle will either attempt to parse JSON or XML streams, or, if a text response was received, will ensure that the final encoding you get is UTF-8. + +You can also request a gzip/deflated response, which, if sent by the server, will be processed before parsing or decoding is performed. + +```js +needle.get('http://stackoverflow.com/feeds', { compressed: true }, function(err, resp) { + console.log(resp.body); // this little guy won't be a Gzipped binary blob + // but a nice object containing all the latest entries +}); +``` + +Or in anti-callback mode, using a few other options: + +```js +var options = { + compressed : true, // sets 'Accept-Encoding' to 'gzip,deflate' + follow_max : 5, // follow up to five redirects + rejectUnauthorized : true // verify SSL certificate +} + +var stream = needle.get('https://backend.server.com/everything.html', options); + +// read the chunks from the 'readable' event, so the stream gets consumed. 
+stream.on('readable', function() { + while (data = this.read()) { + console.log(data.toString()); + } +}) + +stream.on('done', function(err) { + // if our request had an error, our 'done' event will tell us. + if (!err) console.log('Great success!'); +}) +``` + +API +--- + +### needle(method, url[, data][, options][, callback]) `(> 2.0.x)` + +Calling `needle()` directly returns a Promise. Besides `method` and `url`, all parameters are optional, although when sending a `post`, `put` or `patch` request you will get an error if `data` is not present. + +```js +needle('get', 'http://some.url.com') + .then(function(resp) { console.log(resp.body) }) + .catch(function(err) { console.error(err) }) +``` + +Aside from the above, all of Needle's request methods return a Readable stream, and both `options` and `callback` are optional. If passed, the callback will receive three arguments: `error`, `response` and `body`, which is basically an alias for `response.body`. + +### needle.head(url[, options][, callback]) + +```js +needle.head('https://my.backend.server.com', { + open_timeout: 5000 // if we're not able to open a connection in 5 seconds, boom. +}, function(err, resp) { + if (err) + console.log('Shoot! Something is wrong: ' + err.message) + else + console.log('Yup, still alive.') +}) +``` + +### needle.get(url[, options][, callback]) + +```js +needle.get('google.com/search?q=syd+barrett', function(err, resp) { + // if no http:// is found, Needle will automagically prepend it. +}); +``` + +### needle.post(url, data[, options][, callback]) + +```js +var options = { + headers: { 'X-Custom-Header': 'Bumbaway atuna' } +} + +needle.post('https://my.app.com/endpoint', 'foo=bar', options, function(err, resp) { + // you can pass params as a string or as an object. +}); +``` + +### needle.put(url, data[, options][, callback]) + +```js +var nested = { + params: { + are: { + also: 'supported' + } + } +} + +needle.put('https://api.app.com/v2', nested, function(err, resp) { + console.log('Got ' + resp.bytes + ' bytes.') // another nice treat from this handsome fella. +}); +``` + +### needle.patch(url, data[, options][, callback]) + +Same behaviour as PUT. + +### needle.delete(url, data[, options][, callback]) + +```js +var options = { + username: 'fidelio', + password: 'x' +} + +needle.delete('https://api.app.com/messages/123', null, options, function(err, resp) { + // in this case, data may be null, but you need to explicitly pass it. +}); +``` + +### needle.request(method, url, data[, options][, callback]) + +Generic request. This not only allows for flexibility, but also lets you perform a GET request with data, in which case it will be appended to the request as a query string, unless you pass a `json: true` option (read below). + +```js +var params = { + q : 'a very smart query', + page : 2 +} + +needle.request('get', 'forum.com/search', params, function(err, resp) { + if (!err && resp.statusCode == 200) + console.log(resp.body); // here you go, mister. +}); +``` + +Now, if you pass `json: true` among the options, Needle won't set your params as a querystring but instead send a JSON representation of your data through the request's body, as well as set the `Content-Type` and `Accept` headers to `application/json`.
+ +```js +needle.request('get', 'forum.com/search', params, { json: true }, function(err, resp) { + if (resp.statusCode == 200) console.log('It worked!'); +}); +``` + +Events +------ + +The [Readable stream](https://nodejs.org/api/stream.html#stream_class_stream_readable) object returned by the above request methods emits the following events, in addition to the regular ones (e.g. `end`, `close`, `data`, `pipe`, `readable`). + +### Event: `'response'` + + - `response ` + +Emitted when the underlying [http.ClientRequest](https://nodejs.org/api/http.html#http_class_http_clientrequest) emits a response event. This is after the connection is established and the header received, but before any of it is processed (e.g. authorization required or redirect to be followed). No data has been consumed at this point. + +### Event: `'redirect'` + + - `location ` + +Indicates that a redirect is being followed. This means that the response code was a redirect (`301`, `302`, `303`, `307`) and the given [redirect options](#redirect-options) allowed following the URL received in the `Location` header. + +### Event: `'header'` + + - `statusCode ` + - `headers ` + +Triggered after the header has been processed, and just before the data is to be consumed. This implies that no redirect was followed and/or authentication header was received. In other words, we got a "valid" response. + +### Event: `'done'` (previously 'end') + + - `exception ` (optional) + +Emitted when the request/response process has finished, either because all data was consumed or an error occurred somewhere in between. Unlike a regular stream's `end` event, Needle's `done` will be fired either on success or on failure, which is why the first argument may be an Error object. In other words: + +```js +var resp = needle.get('something.worthy/of/being/streamed/by/needle'); +resp.pipe(someWritableStream); + +resp.on('done', function(err) { + if (err) console.log('An error occurred: ' + err.message); + else console.log('Great success!'); +}) +``` + +### Event: `'err'` + + - `exception ` + +Emitted when an error occurs. This should only happen once in the lifecycle of a Needle request. + +### Event: `'timeout'` + + - `type ` + +Emitted when a timeout error occurs. Type can be either 'open', 'response', or 'read'. This will be called right before aborting the request, which will also trigger an `err` event, as described above, with an `ECONNRESET` (Socket hang up) exception. + +Request options +--------------- + +For information about options that have changed, there's always [the changelog](https://github.com/tomas/needle/releases). + + - `agent` : Uses an [http.Agent](https://nodejs.org/api/http.html#http_class_http_agent) of your choice, instead of the global, default one. Useful for tweaking the behaviour at the connection level, such as when doing tunneling (see below for an example). + - `json` : When `true`, sets content type to `application/json` and sends request body as JSON string, instead of a query string. + - `open_timeout`: (or `timeout`) Returns error if connection takes longer than X milliseconds to establish. Defaults to `10000` (10 secs). `0` means no timeout. + - `response_timeout`: Returns error if no response headers are received in X milliseconds, counting from when the connection is opened. Defaults to `0` (no response timeout). + - `read_timeout`: Returns error if data transfer takes longer than X milliseconds, once response headers are received. Defaults to `0` (no timeout). + - `follow_max` : (or `follow`) Number of redirects to follow.
Defaults to `0`. See below for more redirect options. + - `multipart` : Enables multipart/form-data encoding. Defaults to `false`. Use it when uploading files. + - `proxy` : Forwards request through HTTP(s) proxy. Eg. `proxy: 'http://user:pass@proxy.server.com:3128'`. For more advanced proxying/tunneling use a custom `agent`, as described below. + - `headers` : Object containing custom HTTP headers for request. Overrides defaults described below. + - `auth` : Determines what to do with provided username/password. Options are `auto`, `digest` or `basic` (default). `auto` will detect the type of authentication depending on the response headers. + - `stream_length`: When sending streams, this lets you manually set the Content-Length header --if the stream's bytecount is known beforehand--, preventing ECONNRESET (socket hang up) errors on some servers that misbehave when receiving payloads of unknown size. Set it to `0` and Needle will get and set the stream's length for you, or leave unset for the default behaviour, which is no Content-Length header for stream payloads. + - `localAddress` : , IP address. Passed to http/https request. Local interface from witch the request should be emitted. + +Response options +---------------- + + - `decode_response` : (or `decode`) Whether to decode the text responses to UTF-8, if Content-Type header shows a different charset. Defaults to `true`. + - `parse_response` : (or `parse`) Whether to parse XML or JSON response bodies automagically. Defaults to `true`. You can also set this to 'xml' or 'json' in which case Needle will *only* parse the response if the content type matches. + - `output` : Dump response output to file. This occurs after parsing and charset decoding is done. + - `parse_cookies` : Whether to parse response’s `Set-Cookie` header. Defaults to `true`. If parsed, response cookies will be available at `resp.cookies`. + +HTTP Header options +------------------- + +These are basically shortcuts to the `headers` option described above. + + - `cookies` : Builds and sets a Cookie header from a `{ key: 'value' }` object. + - `compressed`: If `true`, sets 'Accept-Encoding' header to 'gzip,deflate', and inflates content if zipped. Defaults to `false`. + - `username` : For HTTP basic auth. + - `password` : For HTTP basic auth. Requires username to be passed, but is optional. + - `accept` : Sets 'Accept' HTTP header. Defaults to `*/*`. + - `connection`: Sets 'Connection' HTTP header. Not set by default, unless running Node < 0.11.4 in which case it defaults to `close`. More info about this below. + - `user_agent`: Sets the 'User-Agent' HTTP header. Defaults to `Needle/{version} (Node.js {node_version})`. + - `content_type`: Sets the 'Content-Type' header. Unset by default, unless you're sending data in which case it's set accordingly to whatever is being sent (`application/x-www-form-urlencoded`, `application/json` or `multipart/form-data`). That is, of course, unless the option is passed, either here or through `options.headers`. You're the boss. + +Node.js TLS Options +------------------- + +These options are passed directly to `https.request` if present. Taken from the [original documentation](http://nodejs.org/docs/latest/api/https.html): + + - `pfx` : Certificate, Private key and CA certificates to use for SSL. + - `key` : Private key to use for SSL. + - `passphrase` : A string of passphrase for the private key or pfx. + - `cert` : Public x509 certificate to use. 
+ - `ca` : An authority certificate or array of authority certificates to check the remote host against. + - `ciphers` : A string describing the ciphers to use or exclude. + - `rejectUnauthorized` : If true, the server certificate is verified against the list of supplied CAs. An 'error' event is emitted if verification fails. Verification happens at the connection level, before the HTTP request is sent. + - `secureProtocol` : The SSL method to use, e.g. SSLv3_method to force SSL version 3. + +Redirect options +---------------- + +These options only apply if the `follow_max` (or `follow`) option is higher than 0. + + - `follow_set_cookies` : Sends the cookies received in the `set-cookie` header as part of the following request. `false` by default. + - `follow_set_referer` : Sets the 'Referer' header to the requested URI when following a redirect. `false` by default. + - `follow_keep_method` : If enabled, resends the request using the original verb instead of being rewritten to `get` with no data. `false` by default. + - `follow_if_same_host` : When true, Needle will only follow redirects that point to the same host as the original request. `false` by default. + - `follow_if_same_protocol` : When true, Needle will only follow redirects that point to the same protocol as the original request. `false` by default. + +Overriding Defaults +------------------- + +Yes sir, we have it. Needle includes a `defaults()` method, that lets you override some of the defaults for all future requests. Like this: + +```js +needle.defaults({ + open_timeout: 60000, + user_agent: 'MyApp/1.2.3', + parse_response: false }); +``` + +This will override Needle's default user agent and 10-second timeout, and disable response parsing, so you don't need to pass those options in every other request. + +More advanced Proxy support +--------------------------- + +Since you can pass a custom HTTPAgent to Needle you can do all sorts of neat stuff. For example, if you want to use the [`tunnel`](https://github.com/koichik/node-tunnel) module for HTTPS proxying, you can do this: + +```js +var tunnel = require('tunnel'); +var myAgent = tunnel.httpOverHttp({ + proxy: { host: 'localhost' } +}); + +needle.get('foobar.com', { agent: myAgent }); +``` + +Regarding the 'Connection' header +--------------------------------- + +Unless you're running an old version of Node (< 0.11.4), by default Needle won't set the Connection header on requests, yielding Node's default behaviour of keeping the connection alive with the target server. This speeds up inmensely the process of sending several requests to the same host. + +On older versions, however, this has the unwanted behaviour of preventing the runtime from exiting, either because of a bug or 'feature' that was changed on 0.11.4. To overcome this Needle does set the 'Connection' header to 'close' on those versions, however this also means that making new requests to the same host doesn't benefit from Keep-Alive. + +So if you're stuck on 0.10 or even lower and want full speed, you can simply set the Connection header to 'Keep-Alive' by using `{ connection: 'Keep-Alive' }`. Please note, though, that an event loop handler will prevent the runtime from exiting so you'll need to manually call `process.exit()` or the universe will collapse. 
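To make the preceding paragraph concrete, here is a minimal sketch of opting back into keep-alive through the documented `connection` shortcut (the URL is just a placeholder):

```js
var needle = require('needle');

// On Node < 0.11.4, needle defaults the Connection header to 'close';
// the shortcut below overrides that for this request only.
needle.get('http://example.com', { connection: 'Keep-Alive' }, function(err, resp) {
  if (err) return console.error(err.message);
  console.log('Got status ' + resp.statusCode);
  // On those old runtimes the open handle may keep the process alive,
  // so exit explicitly once you are done:
  // process.exit();
});
```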
+ +Examples Galore +--------------- + +### HTTPS GET with Basic Auth + +```js +needle.get('https://api.server.com', { username: 'you', password: 'secret' }, + function(err, resp) { + // used HTTP auth +}); +``` + +Or use [RFC-1738](http://tools.ietf.org/html/rfc1738#section-3.1) basic auth URL syntax: + +```js +needle.get('https://username:password@api.server.com', function(err, resp) { + // used HTTP auth from URL +}); +``` + +### Digest Auth + +```js +needle.get('other.server.com', { username: 'you', password: 'secret', auth: 'digest' }, + function(err, resp, body) { + // needle prepends 'http://' to your URL, if missing +}); +``` + +### Custom Accept header, deflate + +```js +var options = { + compressed : true, + follow : 10, + accept : 'application/vnd.github.full+json' +} + +needle.get('api.github.com/users/tomas', options, function(err, resp, body) { + // body will contain a JSON.parse(d) object + // if parsing fails, you'll simply get the original body +}); +``` + +### GET XML object + +```js +needle.get('https://news.ycombinator.com/rss', function(err, resp, body) { + // you'll get a nice object containing the nodes in the RSS +}); +``` + +### GET binary, output to file + +```js +needle.get('http://upload.server.com/tux.png', { output: '/tmp/tux.png' }, function(err, resp, body) { + // you can dump any response to a file, not only binaries. +}); +``` + +### GET through proxy + +```js +needle.get('http://search.npmjs.org', { proxy: 'http://localhost:1234' }, function(err, resp, body) { + // request passed through proxy +}); +``` + +### GET a very large document in a stream (from 0.7+) + +```js +var stream = needle.get('http://www.as35662.net/100.log'); + +stream.on('readable', function() { + var chunk; + while (chunk = this.read()) { + console.log('got data: ', chunk); + } +}); +``` + +### GET JSON object in a stream (from 0.7+) + +```js +var stream = needle.get('http://jsonplaceholder.typicode.com/db', { parse: true }); + +stream.on('readable', function() { + var node; + + // our stream will only emit a single JSON root node. + while (node = this.read()) { + console.log('got data: ', node); + } +}); +``` + +### GET JSONStream flexible parser with search query (from 0.7+) + +```js + + // The 'data' element of this stream will be the string representation + // of the titles of all posts. + +needle.get('http://jsonplaceholder.typicode.com/db', { parse: true }) + .pipe(new JSONStream.parse('posts.*.title')); + .on('data', function (obj) { + console.log('got post title: %s', obj); + }); +``` + +### File upload using multipart, passing file path + +```js +var data = { + foo: 'bar', + image: { file: '/home/tomas/linux.png', content_type: 'image/png' } +} + +needle.post('http://my.other.app.com', data, { multipart: true }, function(err, resp, body) { + // needle will read the file and include it in the form-data as binary +}); +``` + +### Stream upload, PUT or POST + +``` js +needle.put('https://api.app.com/v2', fs.createReadStream('myfile.txt'), function(err, resp, body) { + // stream content is uploaded verbatim +}); +``` + +### Multipart POST, passing data buffer + +```js +var buffer = fs.readFileSync('/path/to/package.zip'); + +var data = { + zip_file: { + buffer : buffer, + filename : 'mypackage.zip', + content_type : 'application/octet-stream' + } +} + +needle.post('http://somewhere.com/over/the/rainbow', data, { multipart: true }, function(err, resp, body) { + // if you see, when using buffers we need to pass the filename for the multipart body. 
+ // you can also pass a filename when using the file path method, in case you want to override + // the default filename to be received on the other end. +}); +``` + +### Multipart with custom Content-Type + +```js +var data = { + token: 'verysecret', + payload: { + value: JSON.stringify({ title: 'test', version: 1 }), + content_type: 'application/json' + } +} + +needle.post('http://test.com/', data, { timeout: 5000, multipart: true }, function(err, resp, body) { + // in this case, if the request takes more than 5 seconds + // the callback will return a [Socket closed] error +}); +``` + +For even more examples, check out the examples directory in the repo. + +### Testing + +To run tests, you need to generate a self-signed SSL certificate in the `test` directory. After cloning the repository, run the following commands: + + $ mkdir -p test/keys + $ openssl genrsa -out test/keys/ssl.key 2048 + $ openssl req -new -key test/keys/ssl.key -x509 -days 999 -out test/keys/ssl.cert + +Then you should be able to run `npm test` once you have the dependencies in place. + +> Note: Tests currently only work on linux-based environments that have `/proc/self/fd`. They *do not* work on MacOS environments. +> You can use Docker to run tests by creating a container and mounting the needle project directory on `/app` +> `docker create --name Needle -v /app -w /app -v /app/node_modules -i node:argon` + +Credits +------- + +Written by Tomás Pollak, with the help of contributors. + +Copyright +--------- + +(c) Fork Ltd. Licensed under the MIT license. diff --git a/node_modules/needle/bin/needle b/node_modules/needle/bin/needle new file mode 100755 index 00000000..baaa79b0 --- /dev/null +++ b/node_modules/needle/bin/needle @@ -0,0 +1,40 @@ +#!/usr/bin/env node +var needle = require('./../lib/needle'); + +function exit(code, str) { + console.log(str) || process.exit(code); +} + +function usage() { + var out = ['Usage: needle [get|head|post|put|delete] url [query]']; + out.push('Examples: \n needle get google.com\n needle post server.com/api foo=bar'); + exit(1, out.join('\n')) +} + +if (process.argv[2] == '-v' || process.argv[2] == '--version') + exit(0, needle.version); +else if (process.argv[2] == null) + usage(); + +var method = process.argv[2], + url = process.argv[3], + options = { compressed: true, parse_response: true, follow_max: 5, timeout: 10000 }; + +if (!needle[method]) { + url = method; + method = 'get'; +} + +var callback = function(err, resp) { + if (err) return exit(1, "Error: " + err.message); + + if (process.argv.indexOf('-i') != -1) + console.log(resp.headers) || console.log(''); + + console.log(resp.body.toString()); +}; + +if (method == 'post' || method == 'put') + needle[method](url, process.argv[4], options, callback); +else + needle[method](url, options, callback); diff --git a/node_modules/needle/examples/deflated-stream.js b/node_modules/needle/examples/deflated-stream.js new file mode 100644 index 00000000..4e771b74 --- /dev/null +++ b/node_modules/needle/examples/deflated-stream.js @@ -0,0 +1,22 @@ +var fs = require('fs'), + stream = require('stream'), + needle = require('./../'); + +var url = 'http://ibl.gamechaser.net/f/tagqfxtteucbuldhezkz/bt_level1.gz'; + +var resp = needle.get(url, { compressed: true, follow_max: 10 }); +console.log('Downloading...'); + +resp.on('readable', function() { + + while (data = this.read()) { + var lines = data.toString().split('\n'); + console.log('Got ' + lines.length + ' items.'); + // console.log(lines); + } + +}) + +resp.on('done', function(data) { + 
console.log('Done'); +}) diff --git a/node_modules/needle/examples/digest-auth.js b/node_modules/needle/examples/digest-auth.js new file mode 100644 index 00000000..5b8e5d5e --- /dev/null +++ b/node_modules/needle/examples/digest-auth.js @@ -0,0 +1,16 @@ +var needle = require('./..'); + +var opts = { + username: 'user3', + password: 'user3', + auth: 'digest' +} + +needle.get('http://test.webdav.org/auth-digest/', opts, function(err, resp, body) { + console.log(resp.headers); + + if (resp.statusCode == 401) + console.log('\nIt failed.') + else + console.log('\nIt worked!') +}); diff --git a/node_modules/needle/examples/download-to-file.js b/node_modules/needle/examples/download-to-file.js new file mode 100644 index 00000000..39d5af97 --- /dev/null +++ b/node_modules/needle/examples/download-to-file.js @@ -0,0 +1,18 @@ +var fs = require('fs'), + needle = require('./..'), + path = require('path'); + +var url = process.argv[2] || 'https://upload.wikimedia.org/wikipedia/commons/a/af/Tux.png'; +var file = path.basename(url); + +console.log('Downloading ' + file); + +needle.get(url, { output: file, follow: 3 }, function(err, resp, data){ + console.log('File saved: ' + process.cwd() + '/' + file); + + var size = fs.statSync(file).size; + if (size == resp.bytes) + console.log(resp.bytes + ' bytes written to file.'); + else + throw new Error('File size mismatch: ' + size + ' != ' + resp.bytes); +}); diff --git a/node_modules/needle/examples/multipart-stream.js b/node_modules/needle/examples/multipart-stream.js new file mode 100644 index 00000000..df1ada6e --- /dev/null +++ b/node_modules/needle/examples/multipart-stream.js @@ -0,0 +1,25 @@ +var needle = require('./../'); + +var url = 'http://posttestserver.com/post.php?dir=needle'; + +var black_pixel = Buffer.from("R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs=", 'base64'); + +var data = { + foo: 'bar', + nested: { + test: 123 + }, + image: { buffer: black_pixel, content_type: 'image/gif' } +} + +var resp = needle.post(url, data, { multipart: true }); + +resp.on('readable', function() { + while (data = this.read()) { + console.log(data.toString()); + } +}) + +resp.on('done', function(data) { + console.log('Done.'); +}) diff --git a/node_modules/needle/examples/parsed-stream.js b/node_modules/needle/examples/parsed-stream.js new file mode 100644 index 00000000..89c22c66 --- /dev/null +++ b/node_modules/needle/examples/parsed-stream.js @@ -0,0 +1,23 @@ +////////////////////////////////////////// +// This example demonstrates what happends +// when you use the built-in JSON parser. +////////////////////////////////////////// + +var fs = require('fs'), + stream = require('stream'), + needle = require('./../'); + +var url = 'http://ip.jsontest.com/', + resp = needle.get(url, { parse: true }); + +resp.on('readable', function(obj) { + var chunk; + + while (chunk = this.read()) { + console.log('root = ', chunk); + } +}); + +resp.on('done', function() { + console.log('Done.'); +}); diff --git a/node_modules/needle/examples/parsed-stream2.js b/node_modules/needle/examples/parsed-stream2.js new file mode 100644 index 00000000..5d9b79af --- /dev/null +++ b/node_modules/needle/examples/parsed-stream2.js @@ -0,0 +1,21 @@ +////////////////////////////////////////// +// This example illustrates a more complex +// example of parsing a JSON stream. 
+////////////////////////////////////////// + +var needle = require('./../'), + JSONStream = require('JSONStream'); + +var url = 'http://jsonplaceholder.typicode.com/db'; + +// Initialize our GET request with our default (JSON) +// parsers disabled. + +var json = new needle.get(url, {parse: false}) + // And now interpret the stream as JSON, returning only the + // title of all the posts. + .pipe(new JSONStream.parse('posts.*.title')); + +json.on('data', function (obj) { + console.log('got title: \'' + obj + '\''); +}) diff --git a/node_modules/needle/examples/stream-events.js b/node_modules/needle/examples/stream-events.js new file mode 100644 index 00000000..62309938 --- /dev/null +++ b/node_modules/needle/examples/stream-events.js @@ -0,0 +1,23 @@ +var needle = require('./..'); + +var resp = needle.get('google.com', { follow_max: 10, timeout: 5000 }); + +resp.on('readable', function() { + var chunk; + while (chunk = this.read()) { + console.log('Got ' + chunk.length + ' bytes'); + } +}) + +resp.on('headers', function(headers) { + console.log('Got headers', headers); +}) + +resp.on('redirect', function(url) { + console.log('Redirected to url ' + url); +}) + +resp.on('done', function(err) { + console.log('Finished. No more data to receive.'); + if (err) console.log('With error', err) +}) diff --git a/node_modules/needle/examples/stream-to-file.js b/node_modules/needle/examples/stream-to-file.js new file mode 100644 index 00000000..d261bee9 --- /dev/null +++ b/node_modules/needle/examples/stream-to-file.js @@ -0,0 +1,14 @@ +var fs = require('fs'), + needle = require('./..'), + path = require('path'); + +var url = process.argv[2] || 'http://www.google.com/images/errors/robot.png'; +var file = path.basename(url); + +console.log('Downloading ' + file + '...'); +needle + .get(url) + .pipe(fs.createWriteStream(file)) + .on('done', function() { + console.log('Done!') + }) diff --git a/node_modules/needle/examples/upload-image.js b/node_modules/needle/examples/upload-image.js new file mode 100644 index 00000000..090e3e3f --- /dev/null +++ b/node_modules/needle/examples/upload-image.js @@ -0,0 +1,51 @@ +var needle = require('../'), + path = require('path'); + +var image = 'https://upload.wikimedia.org/wikipedia/commons/a/af/Tux.png'; + +function upload(obj, cb) { + console.log('Uploading image...'); + + var url = 'http://deviantsart.com'; + + var opts = { + timeout: 10000, + follow: 3, + multipart: true + }; + + var params = { + file: obj + } + + needle.post(url, params, opts, function(err, resp) { + if (err || !resp.body.match('url')) + return cb(err || new Error('No image URL found.')) + + cb(null, JSON.parse(resp.body).url) + }) +} + +function download(url, cb) { + console.log('Getting ' + url); + needle.get(url, function(err, resp) { + if (err) throw err; + + cb(null, resp.body); + }) +} + +//////////////////////////////////////// +// ok, now go. 
+ +download(image, function(err, buffer) { + if (err) throw err; + + var obj = { buffer: buffer, content_type: 'image/png' }; + + upload(obj, function(err, url) { + if (err) throw err; + + console.log('Image uploaded to ' + url); + }) +}) diff --git a/node_modules/needle/lib/auth.js b/node_modules/needle/lib/auth.js new file mode 100644 index 00000000..b7ed4caf --- /dev/null +++ b/node_modules/needle/lib/auth.js @@ -0,0 +1,110 @@ +var createHash = require('crypto').createHash; + +function get_header(header, credentials, opts) { + var type = header.split(' ')[0], + user = credentials[0], + pass = credentials[1]; + + if (type == 'Digest') { + return digest.generate(header, user, pass, opts.method, opts.path); + } else if (type == 'Basic') { + return basic(user, pass); + } +} + +//////////////////// +// basic + +function md5(string) { + return createHash('md5').update(string).digest('hex'); +} + +function basic(user, pass) { + var str = typeof pass == 'undefined' ? user : [user, pass].join(':'); + return 'Basic ' + Buffer.from(str).toString('base64'); +} + +//////////////////// +// digest +// logic inspired from https://github.com/simme/node-http-digest-client + +var digest = {}; + +digest.parse_header = function(header) { + var challenge = {}, + matches = header.match(/([a-z0-9_-]+)="?([a-z0-9=\/\.@\s-]+)"?/gi); + + for (var i = 0, l = matches.length; i < l; i++) { + var parts = matches[i].split('='), + key = parts.shift(), + val = parts.join('=').replace(/^"/, '').replace(/"$/, ''); + + challenge[key] = val; + } + + return challenge; +} + +digest.update_nc = function(nc) { + var max = 99999999; + nc++; + + if (nc > max) + nc = 1; + + var padding = new Array(8).join('0') + ''; + nc = nc + ''; + return padding.substr(0, 8 - nc.length) + nc; +} + +digest.generate = function(header, user, pass, method, path) { + + var nc = 1, + cnonce = null, + challenge = digest.parse_header(header); + + var ha1 = md5(user + ':' + challenge.realm + ':' + pass), + ha2 = md5(method.toUpperCase() + ':' + path), + resp = [ha1, challenge.nonce]; + + if (typeof challenge.qop === 'string') { + cnonce = md5(Math.random().toString(36)).substr(0, 8); + nc = digest.update_nc(nc); + resp = resp.concat(nc, cnonce); + } + + resp = resp.concat(challenge.qop, ha2); + + var params = { + uri : path, + realm : challenge.realm, + nonce : challenge.nonce, + username : user, + response : md5(resp.join(':')) + } + + if (challenge.qop) { + params.qop = challenge.qop; + } + + if (challenge.opaque) { + params.opaque = challenge.opaque; + } + + if (cnonce) { + params.nc = nc; + params.cnonce = cnonce; + } + + header = [] + for (var k in params) + header.push(k + '="' + params[k] + '"') + + return 'Digest ' + header.join(', '); +} + +module.exports = { + header : get_header, + basic : basic, + digest : digest.generate +} diff --git a/node_modules/needle/lib/cookies.js b/node_modules/needle/lib/cookies.js new file mode 100644 index 00000000..0f48afcf --- /dev/null +++ b/node_modules/needle/lib/cookies.js @@ -0,0 +1,79 @@ + +// Simple cookie handling implementation based on the standard RFC 6265. +// +// This module just has two functionalities: +// - Parse a set-cookie-header as a key value object +// - Write a cookie-string from a key value object +// +// All cookie attributes are ignored. 
+ +var unescape = require('querystring').unescape; + +var COOKIE_PAIR = /^([^=\s]+)\s*=\s*("?)\s*(.*)\s*\2\s*$/; +var EXCLUDED_CHARS = /[\x00-\x1F\x7F\x3B\x3B\s\"\,\\"%]/g; +var TRAILING_SEMICOLON = /\x3B+$/; +var SEP_SEMICOLON = /\s*\x3B\s*/; + +// i know these should be 'const', but I'd like to keep +// supporting earlier node.js versions as long as I can. :) + +var KEY_INDEX = 1; // index of key from COOKIE_PAIR match +var VALUE_INDEX = 3; // index of value from COOKIE_PAIR match + +// Returns a copy str trimmed and without trainling semicolon. +function cleanCookieString(str) { + return str.trim().replace(/\x3B+$/, ''); +} + +function getFirstPair(str) { + var index = str.indexOf('\x3B'); + return index === -1 ? str : str.substr(0, index); +} + +// Returns a encoded copy of str based on RFC6265 S4.1.1. +function encodeCookieComponent(str) { + return str.toString().replace(EXCLUDED_CHARS, encodeURIComponent); +} + +// Parses a set-cookie-string based on the standard defined in RFC6265 S4.1.1. +function parseSetCookieString(str) { + str = cleanCookieString(str); + str = getFirstPair(str); + + var res = COOKIE_PAIR.exec(str); + if (!res || !res[VALUE_INDEX]) return null; + + return { + name : unescape(res[KEY_INDEX]), + value : unescape(res[VALUE_INDEX]) + }; +} + +// Parses a set-cookie-header and returns a key/value object. +// Each key represents the name of a cookie. +function parseSetCookieHeader(header) { + if (!header) return {}; + header = Array.isArray(header) ? header : [header]; + + return header.reduce(function(res, str) { + var cookie = parseSetCookieString(str); + if (cookie) res[cookie.name] = cookie.value; + return res; + }, {}); +} + +// Writes a set-cookie-string based on the standard definded in RFC6265 S4.1.1. +function writeCookieString(obj) { + return Object.keys(obj).reduce(function(str, name) { + var encodedName = encodeCookieComponent(name); + var encodedValue = encodeCookieComponent(obj[name]); + str += (str ? '; ' : '') + encodedName + '=' + encodedValue; + return str; + }, ''); +} + +// returns a key/val object from an array of cookie strings +exports.read = parseSetCookieHeader; + +// writes a cookie string header +exports.write = writeCookieString; diff --git a/node_modules/needle/lib/decoder.js b/node_modules/needle/lib/decoder.js new file mode 100644 index 00000000..d82bb5a3 --- /dev/null +++ b/node_modules/needle/lib/decoder.js @@ -0,0 +1,53 @@ +var iconv, + inherits = require('util').inherits, + stream = require('stream'); + +var regex = /(?:charset|encoding)\s*=\s*['"]? *([\w\-]+)/i; + +inherits(StreamDecoder, stream.Transform); + +function StreamDecoder(charset) { + if (!(this instanceof StreamDecoder)) + return new StreamDecoder(charset); + + stream.Transform.call(this, charset); + this.charset = charset; + this.parsed_chunk = false; +} + +StreamDecoder.prototype._transform = function(chunk, encoding, done) { + var res, found; + + // try get charset from chunk, just once + if (this.charset == 'iso-8859-1' && !this.parsed_chunk) { + this.parsed_chunk = true; + + var matches = regex.exec(chunk.toString()); + if (matches) { + found = matches[1].toLowerCase(); + this.charset = found == 'utf-8' ? 
'utf8' : found; + } + } + + try { + res = iconv.decode(chunk, this.charset); + } catch(e) { // something went wrong, just return original chunk + res = chunk; + } + + this.push(res); + done(); +} + +module.exports = function(charset) { + try { + if (!iconv) iconv = require('iconv-lite'); + } catch(e) { + /* iconv not found */ + } + + if (iconv) + return new StreamDecoder(charset); + else + return new stream.PassThrough; +} diff --git a/node_modules/needle/lib/multipart.js b/node_modules/needle/lib/multipart.js new file mode 100644 index 00000000..d1e6e185 --- /dev/null +++ b/node_modules/needle/lib/multipart.js @@ -0,0 +1,98 @@ +var readFile = require('fs').readFile, + basename = require('path').basename; + +exports.build = function(data, boundary, callback) { + + if (typeof data != 'object' || typeof data.pipe == 'function') + return callback(new Error('Multipart builder expects data as key/val object.')); + + var body = '', + object = flatten(data), + count = Object.keys(object).length; + + if (count === 0) + return callback(new Error('Empty multipart body. Invalid data.')) + + function done(err, section) { + if (err) return callback(err); + if (section) body += section; + --count || callback(null, body + '--' + boundary + '--'); + }; + + for (var key in object) { + var value = object[key]; + if (value === null || typeof value == 'undefined') { + done(); + } else if (Buffer.isBuffer(value)) { + var part = { buffer: value, content_type: 'application/octet-stream' }; + generate_part(key, part, boundary, done); + } else { + var part = (value.buffer || value.file || value.content_type) ? value : { value: value }; + generate_part(key, part, boundary, done); + } + } + +} + +function generate_part(name, part, boundary, callback) { + + var return_part = '--' + boundary + '\r\n'; + return_part += 'Content-Disposition: form-data; name="' + name + '"'; + + function append(data, filename) { + + if (data) { + var binary = part.content_type.indexOf('text') == -1; + return_part += '; filename="' + encodeURIComponent(filename) + '"\r\n'; + if (binary) return_part += 'Content-Transfer-Encoding: binary\r\n'; + return_part += 'Content-Type: ' + part.content_type + '\r\n\r\n'; + return_part += binary ? data.toString('binary') : data.toString('utf8'); + } + + callback(null, return_part + '\r\n'); + }; + + if ((part.file || part.buffer) && part.content_type) { + + var filename = part.filename ? part.filename : part.file ? basename(part.file) : name; + if (part.buffer) return append(part.buffer, filename); + + readFile(part.file, function(err, data) { + if (err) return callback(err); + append(data, filename); + }); + + } else { + + if (typeof part.value == 'object') + return callback(new Error('Object received for ' + name + ', expected string.')) + + if (part.content_type) { + return_part += '\r\n'; + return_part += 'Content-Type: ' + part.content_type; + } + + return_part += '\r\n\r\n'; + return_part += Buffer.from(String(part.value), 'utf8').toString('binary'); + append(); + + } + +} + +// flattens nested objects for multipart body +function flatten(object, into, prefix) { + into = into || {}; + + for(var key in object) { + var prefix_key = prefix ? 
prefix + '[' + key + ']' : key; + var prop = object[key]; + + if (prop && typeof prop === 'object' && !(prop.buffer || prop.file || prop.content_type)) + flatten(prop, into, prefix_key) + else + into[prefix_key] = prop; + } + + return into; +} diff --git a/node_modules/needle/lib/needle.js b/node_modules/needle/lib/needle.js new file mode 100644 index 00000000..001007c9 --- /dev/null +++ b/node_modules/needle/lib/needle.js @@ -0,0 +1,797 @@ +////////////////////////////////////////// +// Needle -- HTTP Client for Node.js +// Written by Tomás Pollak +// (c) 2012-2017 - Fork Ltd. +// MIT Licensed +////////////////////////////////////////// + +var fs = require('fs'), + http = require('http'), + https = require('https'), + url = require('url'), + stream = require('stream'), + debug = require('debug')('needle'), + stringify = require('./querystring').build, + multipart = require('./multipart'), + auth = require('./auth'), + cookies = require('./cookies'), + parsers = require('./parsers'), + decoder = require('./decoder'); + +////////////////////////////////////////// +// variabilia + +var version = require('../package.json').version; + +var user_agent = 'Needle/' + version; +user_agent += ' (Node.js ' + process.version + '; ' + process.platform + ' ' + process.arch + ')'; + +var tls_options = 'agent pfx key passphrase cert ca ciphers rejectUnauthorized secureProtocol checkServerIdentity'; + +// older versions of node (< 0.11.4) prevent the runtime from exiting +// because of connections in keep-alive state. so if this is the case +// we'll default new requests to set a Connection: close header. +var close_by_default = !http.Agent || http.Agent.defaultMaxSockets != Infinity; + +// see if we have Object.assign. otherwise fall back to util._extend +var extend = Object.assign ? Object.assign : require('util')._extend; + +// these are the status codes that Needle interprets as redirects. +var redirect_codes = [301, 302, 303, 307]; + +////////////////////////////////////////// +// decompressors for gzip/deflate bodies + +var decompressors = {}; + +try { + + var zlib = require('zlib'); + decompressors['x-deflate'] = zlib.Inflate; + decompressors['deflate'] = zlib.Inflate; + decompressors['x-gzip'] = zlib.Gunzip; + decompressors['gzip'] = zlib.Gunzip; + + // Enable Z_SYNC_FLUSH to avoid Z_BUF_ERROR errors (Node PR #2595) + var zlib_options = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + } + +} catch(e) { /* zlib not available */ } + +////////////////////////////////////////// +// options and aliases + +var defaults = { + // data + boundary : '--------------------NODENEEDLEHTTPCLIENT', + encoding : 'utf8', + parse_response : 'all', // same as true. valid options: 'json', 'xml' or false/null + proxy : null, + + // headers + accept : '*/*', + user_agent : user_agent, + + // numbers + open_timeout : 10000, + response_timeout : 0, + read_timeout : 0, + follow_max : 0, + stream_length : -1, + + // booleans + decode_response : true, + parse_cookies : true, + follow_set_cookies : false, + follow_set_referer : false, + follow_keep_method : false, + follow_if_same_host : false, + follow_if_same_protocol : false +} + +var aliased = { + options: { + decode : 'decode_response', + parse : 'parse_response', + timeout : 'open_timeout', + follow : 'follow_max' + }, + inverted: {} +} + +// only once, invert aliased keys so we can get passed options. 
+Object.keys(aliased.options).map(function(k) { + var value = aliased.options[k]; + aliased.inverted[value] = k; +}); + +////////////////////////////////////////// +// helpers + +function keys_by_type(type) { + return Object.keys(defaults).map(function(el) { + if (defaults[el] !== null && defaults[el].constructor == type) + return el; + }).filter(function(el) { return el }) +} + +function parse_content_type(header) { + if (!header || header === '') return {}; + + var found, charset = 'iso-8859-1', arr = header.split(';'); + + if (arr.length > 1 && (found = arr[1].match(/charset=(.+)/))) + charset = found[1]; + + return { type: arr[0], charset: charset }; +} + +function is_stream(obj) { + return typeof obj.pipe === 'function'; +} + +function get_stream_length(stream, given_length, cb) { + if (given_length > 0) + return cb(given_length); + + if (stream.end !== void 0 && stream.end !== Infinity && stream.start !== void 0) + return cb((stream.end + 1) - (stream.start || 0)); + + fs.stat(stream.path, function(err, stat) { + cb(stat ? stat.size - (stream.start || 0) : null); + }); +} + +////////////////////////////////////////// +// the main act + +function Needle(method, uri, data, options, callback) { + // if (!(this instanceof Needle)) { + // return new Needle(method, uri, data, options, callback); + // } + + if (typeof uri !== 'string') + throw new TypeError('URL must be a string, not ' + uri); + + this.method = method; + this.uri = uri; + this.data = data; + + if (typeof options == 'function') { + this.callback = options; + this.options = {}; + } else { + this.callback = callback; + this.options = options; + } + +} + +Needle.prototype.setup = function(uri, options) { + + function get_option(key, fallback) { + // if original is in options, return that value + if (typeof options[key] != 'undefined') return options[key]; + + // otherwise, return value from alias or fallback/undefined + return typeof options[aliased.inverted[key]] != 'undefined' + ? options[aliased.inverted[key]] : fallback; + } + + function check_value(expected, key) { + var value = get_option(key), + type = typeof value; + + if (type != 'undefined' && type != expected) + throw new TypeError(type + ' received for ' + key + ', but expected a ' + expected); + + return (type == expected) ? value : defaults[key]; + } + + ////////////////////////////////////////////////// + // the basics + + var config = { + http_opts : { + localAddress: get_option('localAddress', undefined) + }, // passed later to http.request() directly + output : options.output, + proxy : get_option('proxy', defaults.proxy), + parser : get_option('parse_response', defaults.parse_response), + encoding : options.encoding || (options.multipart ? 
'binary' : defaults.encoding) + } + + keys_by_type(Boolean).forEach(function(key) { + config[key] = check_value('boolean', key); + }) + + keys_by_type(Number).forEach(function(key) { + config[key] = check_value('number', key); + }) + + // populate http_opts with given TLS options + tls_options.split(' ').forEach(function(key) { + if (typeof options[key] != 'undefined') { + config.http_opts[key] = options[key]; + if (typeof options.agent == 'undefined') + config.http_opts.agent = false; // otherwise tls options are skipped + } + }); + + ////////////////////////////////////////////////// + // headers, cookies + + config.headers = { + 'accept' : options.accept || defaults.accept, + 'user-agent' : options.user_agent || defaults.user_agent + } + + if (options.content_type) + config.headers['content-type'] = options.content_type; + + // set connection header if opts.connection was passed, or if node < 0.11.4 (close) + if (options.connection || close_by_default) + config.headers['connection'] = options.connection || 'close'; + + if ((options.compressed || defaults.compressed) && typeof zlib != 'undefined') + config.headers['accept-encoding'] = 'gzip,deflate'; + + if (options.cookies) + config.headers['cookie'] = cookies.write(options.cookies); + + ////////////////////////////////////////////////// + // basic/digest auth + + if (uri.match(/[^\/]@/)) { // url contains user:pass@host, so parse it. + var parts = (url.parse(uri).auth || '').split(':'); + options.username = parts[0]; + options.password = parts[1]; + } + + if (options.username) { + if (options.auth && (options.auth == 'auto' || options.auth == 'digest')) { + config.credentials = [options.username, options.password]; + } else { + config.headers['authorization'] = auth.basic(options.username, options.password); + } + } + + // if proxy is present, set auth header from either url or proxy_user option. + if (config.proxy) { + if (config.proxy.indexOf('http') === -1) + config.proxy = 'http://' + config.proxy; + + if (config.proxy.indexOf('@') !== -1) { + var proxy = (url.parse(config.proxy).auth || '').split(':'); + options.proxy_user = proxy[0]; + options.proxy_pass = proxy[1]; + } + + if (options.proxy_user) + config.headers['proxy-authorization'] = auth.basic(options.proxy_user, options.proxy_pass); + } + + // now that all our headers are set, overwrite them if instructed. + for (var h in options.headers) + config.headers[h.toLowerCase()] = options.headers[h]; + + return config; +} + +Needle.prototype.start = function() { + + var out = new stream.PassThrough({ objectMode: false }), + uri = this.uri, + data = this.data, + method = this.method, + callback = (typeof this.options == 'function') ? this.options : this.callback, + options = this.options || {}; + + // if no 'http' is found on URL, prepend it. + if (uri.indexOf('http') === -1) + uri = uri.replace(/^(\/\/)?/, 'http://'); + + var self = this, body, waiting = false, config = this.setup(uri, options); + + // unless options.json was set to false, assume boss also wants JSON if content-type matches. + var json = options.json || (options.json !== false && config.headers['content-type'] == 'application/json'); + + if (data) { + + if (options.multipart) { // boss says we do multipart. so we do it. 
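+      // (The multipart body is assembled asynchronously: `waiting` is set so the
+      // request is only fired from next() once multipart.build() hands back the parts.)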
+ var boundary = options.boundary || defaults.boundary; + + waiting = true; + multipart.build(data, boundary, function(err, parts) { + if (err) throw(err); + + config.headers['content-type'] = 'multipart/form-data; boundary=' + boundary; + next(parts); + }); + + } else if (is_stream(data)) { + + if (method.toUpperCase() == 'GET') + throw new Error('Refusing to pipe() a stream via GET. Did you mean .post?'); + + if (config.stream_length > 0 || (config.stream_length === 0 && data.path)) { + // ok, let's get the stream's length and set it as the content-length header. + // this prevents some servers from cutting us off before all the data is sent. + waiting = true; + get_stream_length(data, config.stream_length, function(length) { + data.length = length; + next(data); + }) + + } else { + // if the boss doesn't want us to get the stream's length, or if it doesn't + // have a file descriptor for that purpose, then just head on. + body = data; + } + + } else if (Buffer.isBuffer(data)) { + + body = data; // use the raw buffer as request body. + + } else if (method.toUpperCase() == 'GET' && !json) { + + // append the data to the URI as a querystring. + uri = uri.replace(/\?.*|$/, '?' + stringify(data)); + + } else { // string or object data, no multipart. + + // if string, leave it as it is, otherwise, stringify. + body = (typeof(data) === 'string') ? data + : json ? JSON.stringify(data) : stringify(data); + + // ensure we have a buffer so bytecount is correct. + body = Buffer.from(body, config.encoding); + } + + } + + function next(body) { + if (body) { + if (body.length) config.headers['content-length'] = body.length; + + // if no content-type was passed, determine if json or not. + if (!config.headers['content-type']) { + config.headers['content-type'] = json + ? 'application/json; charset=utf-8' + : 'application/x-www-form-urlencoded'; // no charset says W3 spec. + } + } + + // unless a specific accept header was set, assume json: true wants JSON back. + if (options.json && (!options.accept && !(options.headers || {}).accept)) + config.headers['accept'] = 'application/json'; + + self.send_request(1, method, uri, config, body, out, callback); + } + + if (!waiting) next(body); + return out; +} + +Needle.prototype.get_request_opts = function(method, uri, config) { + var opts = config.http_opts, + proxy = config.proxy, + remote = proxy ? url.parse(proxy) : url.parse(uri); + + opts.protocol = remote.protocol; + opts.host = remote.hostname; + opts.port = remote.port || (remote.protocol == 'https:' ? 443 : 80); + opts.path = proxy ? uri : remote.pathname + (remote.search || ''); + opts.method = method; + opts.headers = config.headers; + + if (!opts.headers['host']) { + // if using proxy, make sure the host header shows the final destination + var target = proxy ? 
url.parse(uri) : remote; + opts.headers['host'] = target.hostname; + + // and if a non standard port was passed, append it to the port header + if (target.port && [80, 443].indexOf(target.port) === -1) { + opts.headers['host'] += ':' + target.port; + } + } + + return opts; +} + +Needle.prototype.should_follow = function(location, config, original) { + if (!location) return false; + + // returns true if location contains matching property (host or protocol) + function matches(property) { + var property = original[property]; + return location.indexOf(property) !== -1; + } + + // first, check whether the requested location is actually different from the original + if (location === original) + return false; + + if (config.follow_if_same_host && !matches('host')) + return false; // host does not match, so not following + + if (config.follow_if_same_protocol && !matches('protocol')) + return false; // procotol does not match, so not following + + return true; +} + +Needle.prototype.send_request = function(count, method, uri, config, post_data, out, callback) { + + var timer, + returned = 0, + self = this, + request_opts = this.get_request_opts(method, uri, config), + protocol = request_opts.protocol == 'https:' ? https : http; + + function done(err, resp) { + if (returned++ > 0) + return debug('Already finished, stopping here.'); + + if (timer) clearTimeout(timer); + request.removeListener('error', had_error); + + if (callback) + return callback(err, resp, resp ? resp.body : undefined); + + // NOTE: this event used to be called 'end', but the behaviour was confusing + // when errors ocurred, because the stream would still emit an 'end' event. + out.emit('done', err); + } + + function had_error(err) { + debug('Request error', err); + out.emit('err', err); + done(err || new Error('Unknown error when making request.')); + } + + function set_timeout(type, milisecs) { + if (timer) clearTimeout(timer); + if (milisecs <= 0) return; + + timer = setTimeout(function() { + out.emit('timeout', type); + request.abort(); + // also invoke done() to terminate job on read_timeout + if (type == 'read') done(new Error(type + ' timeout')); + }, milisecs); + } + + // handle errors on the underlying socket, that may be closed while writing + // for an example case, see test/long_string_spec.js. we make sure this + // scenario ocurred by verifying the socket's writable & destroyed states. + function on_socket_end() { + if (!this.writable && this.destroyed === false) { + this.destroy(); + had_error(new Error('Remote end closed socket abruptly.')) + } + } + + debug('Making request #' + count, request_opts); + var request = protocol.request(request_opts, function(resp) { + + var headers = resp.headers; + debug('Got response', resp.statusCode, headers); + out.emit('response', resp); + + set_timeout('read', config.read_timeout); + + // if we got cookies, parse them unless we were instructed not to. make sure to include any + // cookies that might have been set on previous redirects. + if (config.parse_cookies && (headers['set-cookie'] || config.stored_cookies)) { + resp.cookies = extend(config.stored_cookies || {}, cookies.read(headers['set-cookie'])); + debug('Got cookies', resp.cookies); + } + + // if redirect code is found, determine if we should follow it according to the given options. 
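+    // (follow_max defaults to 0, so redirects are only followed when the caller
+    // opts in, e.g. with { follow_max: 5 } or the `follow` alias.)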
+ if (redirect_codes.indexOf(resp.statusCode) !== -1 && self.should_follow(headers.location, config, uri)) { + // clear timer before following redirects to prevent unexpected setTimeout consequence + clearTimeout(timer); + + if (count <= config.follow_max) { + out.emit('redirect', headers.location); + + // unless 'follow_keep_method' is true, rewrite the request to GET before continuing. + if (!config.follow_keep_method) { + method = 'GET'; + post_data = null; + delete config.headers['content-length']; // in case the original was a multipart POST request. + } + + // if follow_set_cookies is true, make sure to put any cookies in the next request's headers. + if (config.follow_set_cookies && resp.cookies) { + config.stored_cookies = resp.cookies; + config.headers['cookie'] = cookies.write(resp.cookies); + } + + if (config.follow_set_referer) + config.headers['referer'] = encodeURI(uri); // the original, not the destination URL. + + config.headers['host'] = null; // clear previous Host header to avoid conflicts. + + debug('Redirecting to ' + url.resolve(uri, headers.location)); + return self.send_request(++count, method, url.resolve(uri, headers.location), config, post_data, out, callback); + } else if (config.follow_max > 0) { + return done(new Error('Max redirects reached. Possible loop in: ' + headers.location)); + } + } + + // if auth is requested and credentials were not passed, resend request, provided we have user/pass. + if (resp.statusCode == 401 && headers['www-authenticate'] && config.credentials) { + if (!config.headers['authorization']) { // only if authentication hasn't been sent + var auth_header = auth.header(headers['www-authenticate'], config.credentials, request_opts); + + if (auth_header) { + config.headers['authorization'] = auth_header; + return self.send_request(count, method, uri, config, post_data, out, callback); + } + } + } + + // ok, so we got a valid (non-redirect & authorized) response. let's notify the stream guys. + out.emit('header', resp.statusCode, headers); + out.emit('headers', headers); + + var pipeline = [], + mime = parse_content_type(headers['content-type']), + text_response = mime.type && mime.type.indexOf('text/') != -1; + + // To start, if our body is compressed and we're able to inflate it, do it. + if (headers['content-encoding'] && decompressors[headers['content-encoding']]) { + + var decompressor = decompressors[headers['content-encoding']](zlib_options); + + // make sure we catch errors triggered by the decompressor. + decompressor.on('error', had_error); + pipeline.push(decompressor); + } + + // If parse is enabled and we have a parser for it, then go for it. + if (config.parser && parsers[mime.type]) { + + // If a specific parser was requested, make sure we don't parse other types. + var parser_name = config.parser.toString().toLowerCase(); + if (['xml', 'json'].indexOf(parser_name) == -1 || parsers[mime.type].name == parser_name) { + + // OK, so either we're parsing all content types or the one requested matches. + out.parser = parsers[mime.type].name; + pipeline.push(parsers[mime.type].fn()); + + // Set objectMode on out stream to improve performance. + out._writableState.objectMode = true; + out._readableState.objectMode = true; + } + + // If we're not parsing, and unless decoding was disabled, we'll try + // decoding non UTF-8 bodies to UTF-8, using the iconv-lite library. 
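+    // (parse_content_type() falls back to iso-8859-1 when the response declares
+    // a text content-type without a charset, so such bodies also hit the decoder.)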
+ } else if (text_response && config.decode_response + && mime.charset && !mime.charset.match(/utf-?8$/i)) { + pipeline.push(decoder(mime.charset)); + } + + // And `out` is the stream we finally push the decoded/parsed output to. + pipeline.push(out); + + // Now, release the kraken! + var tmp = resp; + while (pipeline.length) { + tmp = tmp.pipe(pipeline.shift()); + } + + // If the user has requested and output file, pipe the output stream to it. + // In stream mode, we will still get the response stream to play with. + if (config.output && resp.statusCode == 200) { + + // for some reason, simply piping resp to the writable stream doesn't + // work all the time (stream gets cut in the middle with no warning). + // so we'll manually need to do the readable/write(chunk) trick. + var file = fs.createWriteStream(config.output); + file.on('error', had_error); + + out.on('end', function() { + if (file.writable) file.end(); + }); + + file.on('close', function() { + delete out.file; + }) + + out.on('readable', function() { + var chunk; + while ((chunk = this.read()) !== null) { + if (file.writable) file.write(chunk); + + // if callback was requested, also push it to resp.body + if (resp.body) resp.body.push(chunk); + } + }) + + out.file = file; + } + + // Only aggregate the full body if a callback was requested. + if (callback) { + resp.raw = []; + resp.body = []; + resp.bytes = 0; + + // Gather and count the amount of (raw) bytes using a PassThrough stream. + var clean_pipe = new stream.PassThrough(); + resp.pipe(clean_pipe); + + clean_pipe.on('readable', function() { + var chunk; + while ((chunk = this.read()) != null) { + resp.bytes += chunk.length; + resp.raw.push(chunk); + } + }) + + // Listen on the 'readable' event to aggregate the chunks, but only if + // file output wasn't requested. Otherwise we'd have two stream readers. + if (!config.output || resp.statusCode != 200) { + out.on('readable', function() { + var chunk; + while ((chunk = this.read()) !== null) { + // We're either pushing buffers or objects, never strings. + if (typeof chunk == 'string') chunk = Buffer.from(chunk); + + // Push all chunks to resp.body. We'll bind them in resp.end(). + resp.body.push(chunk); + } + }) + } + } + + // And set the .body property once all data is in. + out.on('end', function() { + if (resp.body) { // callback mode + + // we want to be able to access to the raw data later, so keep a reference. + resp.raw = Buffer.concat(resp.raw); + + // if parse was successful, we should have an array with one object + if (resp.body[0] !== undefined && !Buffer.isBuffer(resp.body[0])) { + + // that's our body right there. + resp.body = resp.body[0]; + + // set the parser property on our response. we may want to check. + if (out.parser) resp.parser = out.parser; + + } else { // we got one or several buffers. string or binary. + resp.body = Buffer.concat(resp.body); + + // if we're here and parsed is true, it means we tried to but it didn't work. + // so given that we got a text response, let's stringify it. + if (text_response || out.parser) { + resp.body = resp.body.toString(); + } + } + } + + // if an output file is being written to, make sure the callback + // is triggered after all data has been written to it. + if (out.file) { + out.file.on('close', function() { + done(null, resp, resp.body); + }) + } else { // elvis has left the building. + done(null, resp, resp.body); + } + + }); + + }); // end request call + + // unless open_timeout was disabled, set a timeout to abort the request. 
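+  // (open_timeout defaults to 10000 ms; response_timeout and read_timeout
+  // default to 0, which set_timeout() treats as disabled.)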
+ set_timeout('open', config.open_timeout); + + // handle errors on the request object. things might get bumpy. + request.on('error', had_error); + + // make sure timer is cleared if request is aborted (issue #257) + request.once('abort', function() { + if (timer) clearTimeout(timer); + }) + + // handle socket 'end' event to ensure we don't get delayed EPIPE errors. + request.once('socket', function(socket) { + if (socket.connecting) { + socket.once('connect', function() { + set_timeout('response', config.response_timeout); + }) + } else { + set_timeout('response', config.response_timeout); + } + + // console.log(socket); + if (!socket.on_socket_end) { + socket.on_socket_end = on_socket_end; + socket.once('end', function() { process.nextTick(on_socket_end.bind(socket)) }); + } + }) + + if (post_data) { + if (is_stream(post_data)) { + post_data.pipe(request); + } else { + request.write(post_data, config.encoding); + request.end(); + } + } else { + request.end(); + } + + out.request = request; + return out; +} + +////////////////////////////////////////// +// exports + +if (typeof Promise !== 'undefined') { + module.exports = function() { + var verb, args = [].slice.call(arguments); + + if (args[0].match(/\.|\//)) // first argument looks like a URL + verb = (args.length > 2) ? 'post' : 'get'; + else + verb = args.shift(); + + if (verb.match(/get|head/) && args.length == 2) + args.splice(1, 0, null); // assume no data if head/get with two args (url, options) + + return new Promise(function(resolve, reject) { + module.exports.request(verb, args[0], args[1], args[2], function(err, resp) { + return err ? reject(err) : resolve(resp); + }); + }) + } +} + +module.exports.version = version; + +module.exports.defaults = function(obj) { + for (var key in obj) { + var target_key = aliased.options[key] || key; + + if (defaults.hasOwnProperty(target_key) && typeof obj[key] != 'undefined') { + if (target_key != 'parse_response' && target_key != 'proxy') { + // ensure type matches the original, except for proxy/parse_response that can be null/bool or string + var valid_type = defaults[target_key].constructor.name; + + if (obj[key].constructor.name != valid_type) + throw new TypeError('Invalid type for ' + key + ', should be ' + valid_type); + } + defaults[target_key] = obj[key]; + } else { + throw new Error('Invalid property for defaults:' + target_key); + } + } + + return defaults; +} + +'head get'.split(' ').forEach(function(method) { + module.exports[method] = function(uri, options, callback) { + return new Needle(method, uri, null, options, callback).start(); + } +}) + +'post put patch delete'.split(' ').forEach(function(method) { + module.exports[method] = function(uri, data, options, callback) { + return new Needle(method, uri, data, options, callback).start(); + } +}) + +module.exports.request = function(method, uri, data, opts, callback) { + return new Needle(method, uri, data, opts, callback).start(); +}; diff --git a/node_modules/needle/lib/parsers.js b/node_modules/needle/lib/parsers.js new file mode 100644 index 00000000..108b17bb --- /dev/null +++ b/node_modules/needle/lib/parsers.js @@ -0,0 +1,120 @@ +////////////////////////////////////////// +// Defines mappings between content-type +// and the appropriate parsers. 
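+// (JSON and XML parsers are registered below; module.exports.use() can add more.)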
+////////////////////////////////////////// + +var Transform = require('stream').Transform; +var sax = require('sax'); + +function parseXML(str, cb) { + var obj, current, parser = sax.parser(true, { trim: true, lowercase: true }) + parser.onerror = parser.onend = done; + + function done(err) { + parser.onerror = parser.onend = function() { } + cb(err, obj) + } + + function newElement(name, attributes) { + return { + name: name || '', + value: '', + attributes: attributes || {}, + children: [] + } + } + + parser.ontext = function(t) { + if (current) current.value += t + } + + parser.onopentag = function(node) { + var element = newElement(node.name, node.attributes) + if (current) { + element.parent = current + current.children.push(element) + } else { // root object + obj = element + } + + current = element + }; + + parser.onclosetag = function() { + if (typeof current.parent !== 'undefined') { + var just_closed = current + current = current.parent + delete just_closed.parent + } + } + + parser.write(str).close() +} + +function parserFactory(name, fn) { + + function parser() { + var chunks = [], + stream = new Transform({ objectMode: true }); + + // Buffer all our data + stream._transform = function(chunk, encoding, done) { + chunks.push(chunk); + done(); + } + + // And call the parser when all is there. + stream._flush = function(done) { + var self = this, + data = Buffer.concat(chunks); + + try { + fn(data, function(err, result) { + if (err) throw err; + self.push(result); + }); + } catch (err) { + self.push(data); // just pass the original data + } finally { + done(); + } + } + + return stream; + } + + return { fn: parser, name: name }; +} + +var parsers = {} + +function buildParser(name, types, fn) { + var parser = parserFactory(name, fn); + types.forEach(function(type) { + parsers[type] = parser; + }) +} + +buildParser('json', [ + 'application/json', + 'text/javascript' +], function(buffer, cb) { + var err, data; + try { data = JSON.parse(buffer); } catch (e) { err = e; } + cb(err, data); +}); + +buildParser('xml', [ + 'text/xml', + 'application/xml', + 'application/rdf+xml', + 'application/rss+xml', + 'application/atom+xml' +], function(buffer, cb) { + parseXML(buffer.toString(), function(err, obj) { + cb(err, obj) + }) +}); + +module.exports = parsers; +module.exports.use = buildParser; \ No newline at end of file diff --git a/node_modules/needle/lib/querystring.js b/node_modules/needle/lib/querystring.js new file mode 100644 index 00000000..e58cc426 --- /dev/null +++ b/node_modules/needle/lib/querystring.js @@ -0,0 +1,49 @@ +// based on the qs module, but handles null objects as expected +// fixes by Tomas Pollak. 
+ +var toString = Object.prototype.toString; + +function stringify(obj, prefix) { + if (prefix && (obj === null || typeof obj == 'undefined')) { + return prefix + '='; + } else if (toString.call(obj) == '[object Array]') { + return stringifyArray(obj, prefix); + } else if (toString.call(obj) == '[object Object]') { + return stringifyObject(obj, prefix); + } else if (toString.call(obj) == '[object Date]') { + return obj.toISOString(); + } else if (prefix) { // string inside array or hash + return prefix + '=' + encodeURIComponent(String(obj)); + } else if (String(obj).indexOf('=') !== -1) { // string with equal sign + return String(obj); + } else { + throw new TypeError('Cannot build a querystring out of: ' + obj); + } +}; + +function stringifyArray(arr, prefix) { + var ret = []; + + for (var i = 0, len = arr.length; i < len; i++) { + if (prefix) + ret.push(stringify(arr[i], prefix + '[]')); + else + ret.push(stringify(arr[i])); + } + + return ret.join('&'); +} + +function stringifyObject(obj, prefix) { + var ret = []; + + Object.keys(obj).forEach(function(key) { + ret.push(stringify(obj[key], prefix + ? prefix + '[' + encodeURIComponent(key) + ']' + : encodeURIComponent(key))); + }) + + return ret.join('&'); +} + +exports.build = stringify; diff --git a/node_modules/needle/license.txt b/node_modules/needle/license.txt new file mode 100644 index 00000000..fb245f39 --- /dev/null +++ b/node_modules/needle/license.txt @@ -0,0 +1,19 @@ +Copyright (c) Fork, Ltd. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
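Taken together, the needle sources added above expose a small verb-based API. A minimal usage sketch follows; the URL, query values, and options are illustrative and not part of this patch:

```js
var needle = require('needle');

// Callback style: for GET requests, `data` is appended to the URI by
// lib/querystring.js, so this requests
// /search?q=hello%20world&tags[]=a&tags[]=b
needle.request('get', 'https://example.com/search',
  { q: 'hello world', tags: ['a', 'b'] }, null,
  function(err, resp, body) {
    if (err) return console.error(err);
    console.log(resp.statusCode, body);
  });

// Promise style, available because module.exports is wrapped when a global
// Promise exists; the promise resolves with the full response object.
needle('get', 'https://example.com/ping')
  .then(function(resp) { console.log(resp.statusCode, resp.body); })
  .catch(console.error);
```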
diff --git a/node_modules/needle/node_modules/debug/CHANGELOG.md b/node_modules/needle/node_modules/debug/CHANGELOG.md new file mode 100644 index 00000000..820d21e3 --- /dev/null +++ b/node_modules/needle/node_modules/debug/CHANGELOG.md @@ -0,0 +1,395 @@ + +3.1.0 / 2017-09-26 +================== + + * Add `DEBUG_HIDE_DATE` env var (#486) + * Remove ReDoS regexp in %o formatter (#504) + * Remove "component" from package.json + * Remove `component.json` + * Ignore package-lock.json + * Examples: fix colors printout + * Fix: browser detection + * Fix: spelling mistake (#496, @EdwardBetts) + +3.0.1 / 2017-08-24 +================== + + * Fix: Disable colors in Edge and Internet Explorer (#489) + +3.0.0 / 2017-08-08 +================== + + * Breaking: Remove DEBUG_FD (#406) + * Breaking: Use `Date#toISOString()` instead to `Date#toUTCString()` when output is not a TTY (#418) + * Breaking: Make millisecond timer namespace specific and allow 'always enabled' output (#408) + * Addition: document `enabled` flag (#465) + * Addition: add 256 colors mode (#481) + * Addition: `enabled()` updates existing debug instances, add `destroy()` function (#440) + * Update: component: update "ms" to v2.0.0 + * Update: separate the Node and Browser tests in Travis-CI + * Update: refactor Readme, fixed documentation, added "Namespace Colors" section, redid screenshots + * Update: separate Node.js and web browser examples for organization + * Update: update "browserify" to v14.4.0 + * Fix: fix Readme typo (#473) + +2.6.9 / 2017-09-22 +================== + + * remove ReDoS regexp in %o formatter (#504) + +2.6.8 / 2017-05-18 +================== + + * Fix: Check for undefined on browser globals (#462, @marbemac) + +2.6.7 / 2017-05-16 +================== + + * Fix: Update ms to 2.0.0 to fix regular expression denial of service vulnerability (#458, @hubdotcom) + * Fix: Inline extend function in node implementation (#452, @dougwilson) + * Docs: Fix typo (#455, @msasad) + +2.6.5 / 2017-04-27 +================== + + * Fix: null reference check on window.documentElement.style.WebkitAppearance (#447, @thebigredgeek) + * Misc: clean up browser reference checks (#447, @thebigredgeek) + * Misc: add npm-debug.log to .gitignore (@thebigredgeek) + + +2.6.4 / 2017-04-20 +================== + + * Fix: bug that would occur if process.env.DEBUG is a non-string value. (#444, @LucianBuzzo) + * Chore: ignore bower.json in npm installations. 
(#437, @joaovieira) + * Misc: update "ms" to v0.7.3 (@tootallnate) + +2.6.3 / 2017-03-13 +================== + + * Fix: Electron reference to `process.env.DEBUG` (#431, @paulcbetts) + * Docs: Changelog fix (@thebigredgeek) + +2.6.2 / 2017-03-10 +================== + + * Fix: DEBUG_MAX_ARRAY_LENGTH (#420, @slavaGanzin) + * Docs: Add backers and sponsors from Open Collective (#422, @piamancini) + * Docs: Add Slackin invite badge (@tootallnate) + +2.6.1 / 2017-02-10 +================== + + * Fix: Module's `export default` syntax fix for IE8 `Expected identifier` error + * Fix: Whitelist DEBUG_FD for values 1 and 2 only (#415, @pi0) + * Fix: IE8 "Expected identifier" error (#414, @vgoma) + * Fix: Namespaces would not disable once enabled (#409, @musikov) + +2.6.0 / 2016-12-28 +================== + + * Fix: added better null pointer checks for browser useColors (@thebigredgeek) + * Improvement: removed explicit `window.debug` export (#404, @tootallnate) + * Improvement: deprecated `DEBUG_FD` environment variable (#405, @tootallnate) + +2.5.2 / 2016-12-25 +================== + + * Fix: reference error on window within webworkers (#393, @KlausTrainer) + * Docs: fixed README typo (#391, @lurch) + * Docs: added notice about v3 api discussion (@thebigredgeek) + +2.5.1 / 2016-12-20 +================== + + * Fix: babel-core compatibility + +2.5.0 / 2016-12-20 +================== + + * Fix: wrong reference in bower file (@thebigredgeek) + * Fix: webworker compatibility (@thebigredgeek) + * Fix: output formatting issue (#388, @kribblo) + * Fix: babel-loader compatibility (#383, @escwald) + * Misc: removed built asset from repo and publications (@thebigredgeek) + * Misc: moved source files to /src (#378, @yamikuronue) + * Test: added karma integration and replaced babel with browserify for browser tests (#378, @yamikuronue) + * Test: coveralls integration (#378, @yamikuronue) + * Docs: simplified language in the opening paragraph (#373, @yamikuronue) + +2.4.5 / 2016-12-17 +================== + + * Fix: `navigator` undefined in Rhino (#376, @jochenberger) + * Fix: custom log function (#379, @hsiliev) + * Improvement: bit of cleanup + linting fixes (@thebigredgeek) + * Improvement: rm non-maintainted `dist/` dir (#375, @freewil) + * Docs: simplified language in the opening paragraph. (#373, @yamikuronue) + +2.4.4 / 2016-12-14 +================== + + * Fix: work around debug being loaded in preload scripts for electron (#368, @paulcbetts) + +2.4.3 / 2016-12-14 +================== + + * Fix: navigation.userAgent error for react native (#364, @escwald) + +2.4.2 / 2016-12-14 +================== + + * Fix: browser colors (#367, @tootallnate) + * Misc: travis ci integration (@thebigredgeek) + * Misc: added linting and testing boilerplate with sanity check (@thebigredgeek) + +2.4.1 / 2016-12-13 +================== + + * Fix: typo that broke the package (#356) + +2.4.0 / 2016-12-13 +================== + + * Fix: bower.json references unbuilt src entry point (#342, @justmatt) + * Fix: revert "handle regex special characters" (@tootallnate) + * Feature: configurable util.inspect()`options for NodeJS (#327, @tootallnate) + * Feature: %O`(big O) pretty-prints objects (#322, @tootallnate) + * Improvement: allow colors in workers (#335, @botverse) + * Improvement: use same color for same namespace. 
(#338, @lchenay) + +2.3.3 / 2016-11-09 +================== + + * Fix: Catch `JSON.stringify()` errors (#195, Jovan Alleyne) + * Fix: Returning `localStorage` saved values (#331, Levi Thomason) + * Improvement: Don't create an empty object when no `process` (Nathan Rajlich) + +2.3.2 / 2016-11-09 +================== + + * Fix: be super-safe in index.js as well (@TooTallNate) + * Fix: should check whether process exists (Tom Newby) + +2.3.1 / 2016-11-09 +================== + + * Fix: Added electron compatibility (#324, @paulcbetts) + * Improvement: Added performance optimizations (@tootallnate) + * Readme: Corrected PowerShell environment variable example (#252, @gimre) + * Misc: Removed yarn lock file from source control (#321, @fengmk2) + +2.3.0 / 2016-11-07 +================== + + * Fix: Consistent placement of ms diff at end of output (#215, @gorangajic) + * Fix: Escaping of regex special characters in namespace strings (#250, @zacronos) + * Fix: Fixed bug causing crash on react-native (#282, @vkarpov15) + * Feature: Enabled ES6+ compatible import via default export (#212 @bucaran) + * Feature: Added %O formatter to reflect Chrome's console.log capability (#279, @oncletom) + * Package: Update "ms" to 0.7.2 (#315, @DevSide) + * Package: removed superfluous version property from bower.json (#207 @kkirsche) + * Readme: fix USE_COLORS to DEBUG_COLORS + * Readme: Doc fixes for format string sugar (#269, @mlucool) + * Readme: Updated docs for DEBUG_FD and DEBUG_COLORS environment variables (#232, @mattlyons0) + * Readme: doc fixes for PowerShell (#271 #243, @exoticknight @unreadable) + * Readme: better docs for browser support (#224, @matthewmueller) + * Tooling: Added yarn integration for development (#317, @thebigredgeek) + * Misc: Renamed History.md to CHANGELOG.md (@thebigredgeek) + * Misc: Added license file (#226 #274, @CantemoInternal @sdaitzman) + * Misc: Updated contributors (@thebigredgeek) + +2.2.0 / 2015-05-09 +================== + + * package: update "ms" to v0.7.1 (#202, @dougwilson) + * README: add logging to file example (#193, @DanielOchoa) + * README: fixed a typo (#191, @amir-s) + * browser: expose `storage` (#190, @stephenmathieson) + * Makefile: add a `distclean` target (#189, @stephenmathieson) + +2.1.3 / 2015-03-13 +================== + + * Updated stdout/stderr example (#186) + * Updated example/stdout.js to match debug current behaviour + * Renamed example/stderr.js to stdout.js + * Update Readme.md (#184) + * replace high intensity foreground color for bold (#182, #183) + +2.1.2 / 2015-03-01 +================== + + * dist: recompile + * update "ms" to v0.7.0 + * package: update "browserify" to v9.0.3 + * component: fix "ms.js" repo location + * changed bower package name + * updated documentation about using debug in a browser + * fix: security error on safari (#167, #168, @yields) + +2.1.1 / 2014-12-29 +================== + + * browser: use `typeof` to check for `console` existence + * browser: check for `console.log` truthiness (fix IE 8/9) + * browser: add support for Chrome apps + * Readme: added Windows usage remarks + * Add `bower.json` to properly support bower install + +2.1.0 / 2014-10-15 +================== + + * node: implement `DEBUG_FD` env variable support + * package: update "browserify" to v6.1.0 + * package: add "license" field to package.json (#135, @panuhorsmalahti) + +2.0.0 / 2014-09-01 +================== + + * package: update "browserify" to v5.11.0 + * node: use stderr rather than stdout for logging (#29, @stephenmathieson) + +1.0.4 / 2014-07-15 
+================== + + * dist: recompile + * example: remove `console.info()` log usage + * example: add "Content-Type" UTF-8 header to browser example + * browser: place %c marker after the space character + * browser: reset the "content" color via `color: inherit` + * browser: add colors support for Firefox >= v31 + * debug: prefer an instance `log()` function over the global one (#119) + * Readme: update documentation about styled console logs for FF v31 (#116, @wryk) + +1.0.3 / 2014-07-09 +================== + + * Add support for multiple wildcards in namespaces (#122, @seegno) + * browser: fix lint + +1.0.2 / 2014-06-10 +================== + + * browser: update color palette (#113, @gscottolson) + * common: make console logging function configurable (#108, @timoxley) + * node: fix %o colors on old node <= 0.8.x + * Makefile: find node path using shell/which (#109, @timoxley) + +1.0.1 / 2014-06-06 +================== + + * browser: use `removeItem()` to clear localStorage + * browser, node: don't set DEBUG if namespaces is undefined (#107, @leedm777) + * package: add "contributors" section + * node: fix comment typo + * README: list authors + +1.0.0 / 2014-06-04 +================== + + * make ms diff be global, not be scope + * debug: ignore empty strings in enable() + * node: make DEBUG_COLORS able to disable coloring + * *: export the `colors` array + * npmignore: don't publish the `dist` dir + * Makefile: refactor to use browserify + * package: add "browserify" as a dev dependency + * Readme: add Web Inspector Colors section + * node: reset terminal color for the debug content + * node: map "%o" to `util.inspect()` + * browser: map "%j" to `JSON.stringify()` + * debug: add custom "formatters" + * debug: use "ms" module for humanizing the diff + * Readme: add "bash" syntax highlighting + * browser: add Firebug color support + * browser: add colors for WebKit browsers + * node: apply log to `console` + * rewrite: abstract common logic for Node & browsers + * add .jshintrc file + +0.8.1 / 2014-04-14 +================== + + * package: re-add the "component" section + +0.8.0 / 2014-03-30 +================== + + * add `enable()` method for nodejs. Closes #27 + * change from stderr to stdout + * remove unnecessary index.js file + +0.7.4 / 2013-11-13 +================== + + * remove "browserify" key from package.json (fixes something in browserify) + +0.7.3 / 2013-10-30 +================== + + * fix: catch localStorage security error when cookies are blocked (Chrome) + * add debug(err) support. Closes #46 + * add .browser prop to package.json. Closes #42 + +0.7.2 / 2013-02-06 +================== + + * fix package.json + * fix: Mobile Safari (private mode) is broken with debug + * fix: Use unicode to send escape character to shell instead of octal to work with strict mode javascript + +0.7.1 / 2013-02-05 +================== + + * add repository URL to package.json + * add DEBUG_COLORED to force colored output + * add browserify support + * fix component. Closes #24 + +0.7.0 / 2012-05-04 +================== + + * Added .component to package.json + * Added debug.component.js build + +0.6.0 / 2012-03-16 +================== + + * Added support for "-" prefix in DEBUG [Vinay Pulim] + * Added `.enabled` flag to the node version [TooTallNate] + +0.5.0 / 2012-02-02 +================== + + * Added: humanize diffs. Closes #8 + * Added `debug.disable()` to the CS variant + * Removed padding. Closes #10 + * Fixed: persist client-side variant again. 
Closes #9 + +0.4.0 / 2012-02-01 +================== + + * Added browser variant support for older browsers [TooTallNate] + * Added `debug.enable('project:*')` to browser variant [TooTallNate] + * Added padding to diff (moved it to the right) + +0.3.0 / 2012-01-26 +================== + + * Added millisecond diff when isatty, otherwise UTC string + +0.2.0 / 2012-01-22 +================== + + * Added wildcard support + +0.1.0 / 2011-12-02 +================== + + * Added: remove colors unless stderr isatty [TooTallNate] + +0.0.1 / 2010-01-03 +================== + + * Initial release diff --git a/node_modules/needle/node_modules/debug/LICENSE b/node_modules/needle/node_modules/debug/LICENSE new file mode 100644 index 00000000..658c933d --- /dev/null +++ b/node_modules/needle/node_modules/debug/LICENSE @@ -0,0 +1,19 @@ +(The MIT License) + +Copyright (c) 2014 TJ Holowaychuk + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/needle/node_modules/debug/README.md b/node_modules/needle/node_modules/debug/README.md new file mode 100644 index 00000000..0ee7634d --- /dev/null +++ b/node_modules/needle/node_modules/debug/README.md @@ -0,0 +1,437 @@ +# debug +[![Build Status](https://travis-ci.org/visionmedia/debug.svg?branch=master)](https://travis-ci.org/visionmedia/debug) [![Coverage Status](https://coveralls.io/repos/github/visionmedia/debug/badge.svg?branch=master)](https://coveralls.io/github/visionmedia/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny JavaScript debugging utility modelled after Node.js core's debugging +technique. Works in Node.js and web browsers. + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. 
+ +Example [_app.js_](./examples/node/app.js): + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %o', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example [_worker.js_](./examples/node/worker.js): + +```js +var a = require('debug')('worker:a') + , b = require('debug')('worker:b'); + +function work() { + a('doing lots of uninteresting work'); + setTimeout(work, Math.random() * 1000); +} + +work(); + +function workb() { + b('doing some work'); + setTimeout(workb, Math.random() * 2000); +} + +workb(); +``` + +The `DEBUG` environment variable is then used to enable these based on space or +comma-delimited names. + +Here are some examples: + +screen shot 2017-08-08 at 12 53 04 pm +screen shot 2017-08-08 at 12 53 38 pm +screen shot 2017-08-08 at 12 53 25 pm + +#### Windows command prompt notes + +##### CMD + +On Windows the environment variable is set using the `set` command. + +```cmd +set DEBUG=*,-not_this +``` + +Example: + +```cmd +set DEBUG=* & node app.js +``` + +##### PowerShell (VS Code default) + +PowerShell uses different syntax to set environment variables. + +```cmd +$env:DEBUG = "*,-not_this" +``` + +Example: + +```cmd +$env:DEBUG='app';node app.js +``` + +Then, run the program to be debugged as usual. + +npm script example: +```js + "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", +``` + +## Namespace Colors + +Every debug instance has a color generated for it based on its namespace name. +This helps when visually parsing the debug output to identify which debug instance +a debug line belongs to. + +#### Node.js + +In Node.js, colors are enabled when stderr is a TTY. You also _should_ install +the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, +otherwise debug will only use a small handful of basic colors. + + + +#### Web Browser + +Colors are also enabled on "Web Inspectors" that understand the `%c` formatting +option. These are WebKit web inspectors, Firefox ([since version +31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) +and the Firebug plugin for Firefox (any version). + + + + +## Millisecond diff + +When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. + + + +When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below: + + + + +## Conventions + +If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. + +## Wildcards + +The `*` character may be used as a wildcard. 
Suppose for example your library has +debuggers named "connect:bodyParser", "connect:compress", "connect:session", +instead of listing all three with +`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do +`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + +You can also exclude specific debuggers by prefixing them with a "-" character. +For example, `DEBUG=*,-connect:*` would include all debuggers except those +starting with "connect:". + +## Environment Variables + +When running through Node.js, you can set a few environment variables that will +change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + +__Note:__ The environment variables beginning with `DEBUG_` end up being +converted into an Options object that gets used with `%o`/`%O` formatters. +See the Node.js documentation for +[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) +for the complete list. + +## Formatters + +Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. +Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + + +### Custom formatters + +You can add custom formatters by extending the `debug.formatters` object. +For example, if you wanted to add support for rendering a Buffer as hex with +`%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + + +## Browser Support + +You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), +or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), +if you don't want to build it yourself. + +Debug's enable state is currently persisted by `localStorage`. +Consider the situation shown below where you have `worker:a` and `worker:b`, +and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. 
+ +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example [_stdout.js_](./examples/node/stdout.js): + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + +## Extend +You can simply extend debugger +```js +const log = require('debug')('auth'); + +//creates new debug instance with extended namespace +const logSign = log.extend('sign'); +const logLogin = log.extend('login'); + +log('hello'); // auth hello +logSign('hello'); //auth:sign hello +logLogin('hello'); //auth:login hello +``` + +## Set dynamically + +You can also enable debug dynamically by calling the `enable()` method : + +```js +let debug = require('debug'); + +console.log(1, debug.enabled('test')); + +debug.enable('test'); +console.log(2, debug.enabled('test')); + +debug.disable(); +console.log(3, debug.enabled('test')); + +``` + +print : +``` +1 false +2 true +3 false +``` + +Usage : +`enable(namespaces)` +`namespaces` can include modes separated by a colon and wildcards. + +Note that calling `enable()` completely overrides previously set DEBUG variable : + +``` +$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))' +=> false +``` + +## Checking whether a debug target is enabled + +After you've created a debug instance, you can determine whether or not it is +enabled by checking the `enabled` property: + +```javascript +const debug = require('debug')('http'); + +if (debug.enabled) { + // do stuff... +} +``` + +You can also manually toggle this property to force the debug instance to be +enabled or disabled. + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + +## Backers + +Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/needle/node_modules/debug/dist/debug.js b/node_modules/needle/node_modules/debug/dist/debug.js new file mode 100644 index 00000000..f271e01c --- /dev/null +++ b/node_modules/needle/node_modules/debug/dist/debug.js @@ -0,0 +1,886 @@ +"use strict"; + +function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } + +(function (f) { + if ((typeof exports === "undefined" ? "undefined" : _typeof(exports)) === "object" && typeof module !== "undefined") { + module.exports = f(); + } else if (typeof define === "function" && define.amd) { + define([], f); + } else { + var g; + + if (typeof window !== "undefined") { + g = window; + } else if (typeof global !== "undefined") { + g = global; + } else if (typeof self !== "undefined") { + g = self; + } else { + g = this; + } + + g.debug = f(); + } +})(function () { + var define, module, exports; + return function () { + function r(e, n, t) { + function o(i, f) { + if (!n[i]) { + if (!e[i]) { + var c = "function" == typeof require && require; + if (!f && c) return c(i, !0); + if (u) return u(i, !0); + var a = new Error("Cannot find module '" + i + "'"); + throw a.code = "MODULE_NOT_FOUND", a; + } + + var p = n[i] = { + exports: {} + }; + e[i][0].call(p.exports, function (r) { + var n = e[i][1][r]; + return o(n || r); + }, p, p.exports, r, e, n, t); + } + + return n[i].exports; + } + + for (var u = "function" == typeof require && require, i = 0; i < t.length; i++) { + o(t[i]); + } + + return o; + } + + return r; + }()({ + 1: [function (require, module, exports) { + /** + * Helpers. + */ + var s = 1000; + var m = s * 60; + var h = m * 60; + var d = h * 24; + var w = d * 7; + var y = d * 365.25; + /** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + + module.exports = function (val, options) { + options = options || {}; + + var type = _typeof(val); + + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isNaN(val) === false) { + return options.long ? fmtLong(val) : fmtShort(val); + } + + throw new Error('val is not a non-empty string or a valid number. val=' + JSON.stringify(val)); + }; + /** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + + + function parse(str) { + str = String(str); + + if (str.length > 100) { + return; + } + + var match = /^((?:\d+)?\-?\d?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(str); + + if (!match) { + return; + } + + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + + case 'weeks': + case 'week': + case 'w': + return n * w; + + case 'days': + case 'day': + case 'd': + return n * d; + + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + + default: + return undefined; + } + } + /** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + + + function fmtShort(ms) { + var msAbs = Math.abs(ms); + + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + + return ms + 'ms'; + } + /** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + + + function fmtLong(ms) { + var msAbs = Math.abs(ms); + + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + + return ms + ' ms'; + } + /** + * Pluralization helper. + */ + + + function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); + } + }, {}], + 2: [function (require, module, exports) { + // shim for using process in browser + var process = module.exports = {}; // cached from whatever global is present so that test runners that stub it + // don't break things. But we need to wrap it in a try catch in case it is + // wrapped in strict mode code which doesn't define any globals. It's inside a + // function because try/catches deoptimize in certain engines. 
+ + var cachedSetTimeout; + var cachedClearTimeout; + + function defaultSetTimout() { + throw new Error('setTimeout has not been defined'); + } + + function defaultClearTimeout() { + throw new Error('clearTimeout has not been defined'); + } + + (function () { + try { + if (typeof setTimeout === 'function') { + cachedSetTimeout = setTimeout; + } else { + cachedSetTimeout = defaultSetTimout; + } + } catch (e) { + cachedSetTimeout = defaultSetTimout; + } + + try { + if (typeof clearTimeout === 'function') { + cachedClearTimeout = clearTimeout; + } else { + cachedClearTimeout = defaultClearTimeout; + } + } catch (e) { + cachedClearTimeout = defaultClearTimeout; + } + })(); + + function runTimeout(fun) { + if (cachedSetTimeout === setTimeout) { + //normal enviroments in sane situations + return setTimeout(fun, 0); + } // if setTimeout wasn't available but was latter defined + + + if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { + cachedSetTimeout = setTimeout; + return setTimeout(fun, 0); + } + + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedSetTimeout(fun, 0); + } catch (e) { + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedSetTimeout.call(null, fun, 0); + } catch (e) { + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error + return cachedSetTimeout.call(this, fun, 0); + } + } + } + + function runClearTimeout(marker) { + if (cachedClearTimeout === clearTimeout) { + //normal enviroments in sane situations + return clearTimeout(marker); + } // if clearTimeout wasn't available but was latter defined + + + if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { + cachedClearTimeout = clearTimeout; + return clearTimeout(marker); + } + + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedClearTimeout(marker); + } catch (e) { + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedClearTimeout.call(null, marker); + } catch (e) { + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error. + // Some versions of I.E. 
have different rules for clearTimeout vs setTimeout + return cachedClearTimeout.call(this, marker); + } + } + } + + var queue = []; + var draining = false; + var currentQueue; + var queueIndex = -1; + + function cleanUpNextTick() { + if (!draining || !currentQueue) { + return; + } + + draining = false; + + if (currentQueue.length) { + queue = currentQueue.concat(queue); + } else { + queueIndex = -1; + } + + if (queue.length) { + drainQueue(); + } + } + + function drainQueue() { + if (draining) { + return; + } + + var timeout = runTimeout(cleanUpNextTick); + draining = true; + var len = queue.length; + + while (len) { + currentQueue = queue; + queue = []; + + while (++queueIndex < len) { + if (currentQueue) { + currentQueue[queueIndex].run(); + } + } + + queueIndex = -1; + len = queue.length; + } + + currentQueue = null; + draining = false; + runClearTimeout(timeout); + } + + process.nextTick = function (fun) { + var args = new Array(arguments.length - 1); + + if (arguments.length > 1) { + for (var i = 1; i < arguments.length; i++) { + args[i - 1] = arguments[i]; + } + } + + queue.push(new Item(fun, args)); + + if (queue.length === 1 && !draining) { + runTimeout(drainQueue); + } + }; // v8 likes predictible objects + + + function Item(fun, array) { + this.fun = fun; + this.array = array; + } + + Item.prototype.run = function () { + this.fun.apply(null, this.array); + }; + + process.title = 'browser'; + process.browser = true; + process.env = {}; + process.argv = []; + process.version = ''; // empty string to avoid regexp issues + + process.versions = {}; + + function noop() {} + + process.on = noop; + process.addListener = noop; + process.once = noop; + process.off = noop; + process.removeListener = noop; + process.removeAllListeners = noop; + process.emit = noop; + process.prependListener = noop; + process.prependOnceListener = noop; + + process.listeners = function (name) { + return []; + }; + + process.binding = function (name) { + throw new Error('process.binding is not supported'); + }; + + process.cwd = function () { + return '/'; + }; + + process.chdir = function (dir) { + throw new Error('process.chdir is not supported'); + }; + + process.umask = function () { + return 0; + }; + }, {}], + 3: [function (require, module, exports) { + /** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + Object.keys(env).forEach(function (key) { + createDebug[key] = env[key]; + }); + /** + * Active `debug` instances. + */ + + createDebug.instances = []; + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". 
+ */ + + createDebug.formatters = {}; + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + + function selectColor(namespace) { + var hash = 0; + + for (var i = 0; i < namespace.length; i++) { + hash = (hash << 5) - hash + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + + createDebug.selectColor = selectColor; + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + + function createDebug(namespace) { + var prevTime; + + function debug() { + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + // Disabled? + if (!debug.enabled) { + return; + } + + var self = debug; // Set `diff` timestamp + + var curr = Number(new Date()); + var ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } // Apply any `formatters` transformations + + + var index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, function (match, format) { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return match; + } + + index++; + var formatter = createDebug.formatters[format]; + + if (typeof formatter === 'function') { + var val = args[index]; + match = formatter.call(self, val); // Now we need to remove `args[index]` since it's inlined in the `format` + + args.splice(index, 1); + index--; + } + + return match; + }); // Apply env-specific formatting (colors, etc.) + + createDebug.formatArgs.call(self, args); + var logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.enabled = createDebug.enabled(namespace); + debug.useColors = createDebug.useColors(); + debug.color = selectColor(namespace); + debug.destroy = destroy; + debug.extend = extend; // Debug.formatArgs = formatArgs; + // debug.rawLog = rawLog; + // env-specific initialization logic for debug instances + + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + createDebug.instances.push(debug); + return debug; + } + + function destroy() { + var index = createDebug.instances.indexOf(this); + + if (index !== -1) { + createDebug.instances.splice(index, 1); + return true; + } + + return false; + } + + function extend(namespace, delimiter) { + return createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + } + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + + + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.names = []; + createDebug.skips = []; + var i; + var split = (typeof namespaces === 'string' ? 
namespaces : '').split(/[\s,]+/); + var len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.substr(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + + for (i = 0; i < createDebug.instances.length; i++) { + var instance = createDebug.instances[i]; + instance.enabled = createDebug.enabled(instance.namespace); + } + } + /** + * Disable debug output. + * + * @api public + */ + + + function disable() { + createDebug.enable(''); + } + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + + + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + var i; + var len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + + + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + + return val; + } + + createDebug.enable(createDebug.load()); + return createDebug; + } + + module.exports = setup; + }, { + "ms": 1 + }], + 4: [function (require, module, exports) { + (function (process) { + /* eslint-env browser */ + + /** + * This is the web browser implementation of `debug()`. + */ + exports.log = log; + exports.formatArgs = formatArgs; + exports.save = save; + exports.load = load; + exports.useColors = useColors; + exports.storage = localstorage(); + /** + * Colors. + */ + + exports.colors = ['#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF', '#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99', '#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF', '#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33', '#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF', '#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF', '#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033', '#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333', '#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633', '#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033', '#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333', '#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633', '#FF9900', '#FF9933', '#FFCC00', '#FFCC33']; + /** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + // eslint-disable-next-line complexity + + function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } // Internet Explorer and Edge do not support colors. 
+ + + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + + + return typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 + typeof window !== 'undefined' && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker + typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); + } + /** + * Colorize log arguments if enabled. + * + * @api public + */ + + + function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + this.namespace + (this.useColors ? ' %c' : ' ') + args[0] + (this.useColors ? '%c ' : ' ') + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + var c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + + var index = 0; + var lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, function (match) { + if (match === '%%') { + return; + } + + index++; + + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + args.splice(lastC, 0, c); + } + /** + * Invokes `console.log()` when available. + * No-op when `console.log` is not a "function". + * + * @api public + */ + + + function log() { + var _console; + + // This hackery is required for IE8/9, where + // the `console.log` function doesn't have 'apply' + return (typeof console === "undefined" ? "undefined" : _typeof(console)) === 'object' && console.log && (_console = console).log.apply(_console, arguments); + } + /** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ + + + function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) {// Swallow + // XXX (@Qix-) should we be logging these? + } + } + /** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + + + function load() { + var r; + + try { + r = exports.storage.getItem('debug'); + } catch (error) {} // Swallow + // XXX (@Qix-) should we be logging these? + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + + + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; + } + /** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. 
+ * + * @return {LocalStorage} + * @api private + */ + + + function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) {// Swallow + // XXX (@Qix-) should we be logging these? + } + } + + module.exports = require('./common')(exports); + var formatters = module.exports.formatters; + /** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + + formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } + }; + }).call(this, require('_process')); + }, { + "./common": 3, + "_process": 2 + }] + }, {}, [4])(4); +}); + diff --git a/node_modules/needle/node_modules/debug/node.js b/node_modules/needle/node_modules/debug/node.js new file mode 100644 index 00000000..7fc36fe6 --- /dev/null +++ b/node_modules/needle/node_modules/debug/node.js @@ -0,0 +1 @@ +module.exports = require('./src/node'); diff --git a/node_modules/needle/node_modules/debug/package.json b/node_modules/needle/node_modules/debug/package.json new file mode 100644 index 00000000..3650bb0b --- /dev/null +++ b/node_modules/needle/node_modules/debug/package.json @@ -0,0 +1,51 @@ +{ + "name": "debug", + "version": "3.2.6", + "repository": { + "type": "git", + "url": "git://github.com/visionmedia/debug.git" + }, + "description": "small debugging utility", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "node.js", + "dist/debug.js", + "LICENSE", + "README.md" + ], + "author": "TJ Holowaychuk ", + "contributors": [ + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + }, + "devDependencies": { + "@babel/cli": "^7.0.0", + "@babel/core": "^7.0.0", + "@babel/preset-env": "^7.0.0", + "browserify": "14.4.0", + "chai": "^3.5.0", + "concurrently": "^3.1.0", + "coveralls": "^3.0.2", + "istanbul": "^0.4.5", + "karma": "^3.0.0", + "karma-chai": "^0.1.0", + "karma-mocha": "^1.3.0", + "karma-phantomjs-launcher": "^1.0.2", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "rimraf": "^2.5.4", + "xo": "^0.23.0" + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "unpkg": "./dist/debug.js" +} diff --git a/node_modules/needle/node_modules/debug/src/browser.js b/node_modules/needle/node_modules/debug/src/browser.js new file mode 100644 index 00000000..c924b0ac --- /dev/null +++ b/node_modules/needle/node_modules/debug/src/browser.js @@ -0,0 +1,180 @@ +"use strict"; + +function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } + +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +/** + * Colors. 
+ */ + +exports.colors = ['#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF', '#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99', '#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF', '#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33', '#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF', '#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF', '#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033', '#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333', '#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633', '#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033', '#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333', '#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633', '#FF9900', '#FF9933', '#FFCC00', '#FFCC33']; +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ +// eslint-disable-next-line complexity + +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } // Internet Explorer and Edge do not support colors. + + + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + + + return typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 + typeof window !== 'undefined' && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker + typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); +} +/** + * Colorize log arguments if enabled. + * + * @api public + */ + + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + this.namespace + (this.useColors ? ' %c' : ' ') + args[0] + (this.useColors ? 
'%c ' : ' ') + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + var c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + + var index = 0; + var lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, function (match) { + if (match === '%%') { + return; + } + + index++; + + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + args.splice(lastC, 0, c); +} +/** + * Invokes `console.log()` when available. + * No-op when `console.log` is not a "function". + * + * @api public + */ + + +function log() { + var _console; + + // This hackery is required for IE8/9, where + // the `console.log` function doesn't have 'apply' + return (typeof console === "undefined" ? "undefined" : _typeof(console)) === 'object' && console.log && (_console = console).log.apply(_console, arguments); +} +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ + + +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) {// Swallow + // XXX (@Qix-) should we be logging these? + } +} +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + + +function load() { + var r; + + try { + r = exports.storage.getItem('debug'); + } catch (error) {} // Swallow + // XXX (@Qix-) should we be logging these? + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + + + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) {// Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = require('./common')(exports); +var formatters = module.exports.formatters; +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; + diff --git a/node_modules/needle/node_modules/debug/src/common.js b/node_modules/needle/node_modules/debug/src/common.js new file mode 100644 index 00000000..e0de3fb5 --- /dev/null +++ b/node_modules/needle/node_modules/debug/src/common.js @@ -0,0 +1,249 @@ +"use strict"; + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. 
+ */ +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + Object.keys(env).forEach(function (key) { + createDebug[key] = env[key]; + }); + /** + * Active `debug` instances. + */ + + createDebug.instances = []; + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + + createDebug.formatters = {}; + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + + function selectColor(namespace) { + var hash = 0; + + for (var i = 0; i < namespace.length; i++) { + hash = (hash << 5) - hash + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + + createDebug.selectColor = selectColor; + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + + function createDebug(namespace) { + var prevTime; + + function debug() { + // Disabled? + if (!debug.enabled) { + return; + } + + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + var self = debug; // Set `diff` timestamp + + var curr = Number(new Date()); + var ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } // Apply any `formatters` transformations + + + var index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, function (match, format) { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return match; + } + + index++; + var formatter = createDebug.formatters[format]; + + if (typeof formatter === 'function') { + var val = args[index]; + match = formatter.call(self, val); // Now we need to remove `args[index]` since it's inlined in the `format` + + args.splice(index, 1); + index--; + } + + return match; + }); // Apply env-specific formatting (colors, etc.) 
+ + createDebug.formatArgs.call(self, args); + var logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.enabled = createDebug.enabled(namespace); + debug.useColors = createDebug.useColors(); + debug.color = selectColor(namespace); + debug.destroy = destroy; + debug.extend = extend; // Debug.formatArgs = formatArgs; + // debug.rawLog = rawLog; + // env-specific initialization logic for debug instances + + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + createDebug.instances.push(debug); + return debug; + } + + function destroy() { + var index = createDebug.instances.indexOf(this); + + if (index !== -1) { + createDebug.instances.splice(index, 1); + return true; + } + + return false; + } + + function extend(namespace, delimiter) { + return createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + } + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + + + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.names = []; + createDebug.skips = []; + var i; + var split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + var len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.substr(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + + for (i = 0; i < createDebug.instances.length; i++) { + var instance = createDebug.instances[i]; + instance.enabled = createDebug.enabled(instance.namespace); + } + } + /** + * Disable debug output. + * + * @api public + */ + + + function disable() { + createDebug.enable(''); + } + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + + + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + var i; + var len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + + + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + + return val; + } + + createDebug.enable(createDebug.load()); + return createDebug; +} + +module.exports = setup; + diff --git a/node_modules/needle/node_modules/debug/src/index.js b/node_modules/needle/node_modules/debug/src/index.js new file mode 100644 index 00000000..02173159 --- /dev/null +++ b/node_modules/needle/node_modules/debug/src/index.js @@ -0,0 +1,12 @@ +"use strict"; + +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. 
+ */ +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} + diff --git a/node_modules/needle/node_modules/debug/src/node.js b/node_modules/needle/node_modules/debug/src/node.js new file mode 100644 index 00000000..dbbb5f10 --- /dev/null +++ b/node_modules/needle/node_modules/debug/src/node.js @@ -0,0 +1,174 @@ +"use strict"; + +/** + * Module dependencies. + */ +var tty = require('tty'); + +var util = require('util'); +/** + * This is the Node.js implementation of `debug()`. + */ + + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + var supportsColor = require('supports-color'); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68, 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134, 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 214, 215, 220, 221]; + } +} catch (error) {} // Swallow - we only care if `supports-color` is available; it doesn't have to be. + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + + +exports.inspectOpts = Object.keys(process.env).filter(function (key) { + return /^debug_/i.test(key); +}).reduce(function (obj, key) { + // Camel-case + var prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, function (_, k) { + return k.toUpperCase(); + }); // Coerce string value into JS value + + var val = process.env[key]; + + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd); +} +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + + +function formatArgs(args) { + var name = this.namespace, + useColors = this.useColors; + + if (useColors) { + var c = this.color; + var colorCode = "\x1B[3" + (c < 8 ? c : '8;5;' + c); + var prefix = " ".concat(colorCode, ";1m").concat(name, " \x1B[0m"); + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + "\x1B[0m"); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + + return new Date().toISOString() + ' '; +} +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + + +function log() { + return process.stderr.write(util.format.apply(util, arguments) + '\n'); +} +/** + * Save `namespaces`. 
+ * + * @param {String} namespaces + * @api private + */ + + +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + + +function load() { + return process.env.DEBUG; +} +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + + +function init(debug) { + debug.inspectOpts = {}; + var keys = Object.keys(exports.inspectOpts); + + for (var i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = require('./common')(exports); +var formatters = module.exports.formatters; +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts).replace(/\s*\n\s*/g, ' '); +}; +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + diff --git a/node_modules/needle/node_modules/ms/index.js b/node_modules/needle/node_modules/ms/index.js new file mode 100644 index 00000000..c4498bcc --- /dev/null +++ b/node_modules/needle/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/node_modules/needle/node_modules/ms/license.md b/node_modules/needle/node_modules/ms/license.md new file mode 100644 index 00000000..69b61253 --- /dev/null +++ b/node_modules/needle/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
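A minimal usage sketch of the vendored `ms` module added above (illustrative only, not part of any file in this patch). The values follow from the unit constants, `fmtShort`/`fmtLong`, and the 1.5x pluralization threshold in `index.js`; `require('ms')` is assumed to resolve to this nested copy, which is how the bundled `debug` module picks it up via `createDebug.humanize = require('ms')`.

```js
// Illustrative only: exercises the vendored ms helper shown above.
var ms = require('ms'); // assumed to resolve to needle/node_modules/ms

// String -> Number (parse)
ms('2 days');               // 172800000  (2 * d, with d = 86400000)
ms('1.5h');                 // 5400000    (fractional values match the regex)
ms('100');                  // 100        (a missing unit defaults to 'ms')

// Number -> String (fmtShort / fmtLong)
ms(90000);                  // '2m'         Math.round(90000 / m) with m = 60000
ms(90000, { long: true });  // '2 minutes'  plural(): 90000 >= 1.5 * m
ms(89000, { long: true });  // '1 minute'   rounds to 1 and stays singular

// Anything else throws, per the exported wrapper:
// ms({}) -> Error: val is not a non-empty string or a valid number. ...
```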
diff --git a/node_modules/needle/node_modules/ms/package.json b/node_modules/needle/node_modules/ms/package.json new file mode 100644 index 00000000..eea666e1 --- /dev/null +++ b/node_modules/needle/node_modules/ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "ms", + "version": "2.1.2", + "description": "Tiny millisecond conversion utility", + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.12.1", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1" + } +} diff --git a/node_modules/needle/node_modules/ms/readme.md b/node_modules/needle/node_modules/ms/readme.md new file mode 100644 index 00000000..9a1996b1 --- /dev/null +++ b/node_modules/needle/node_modules/ms/readme.md @@ -0,0 +1,60 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/zeit) + +Use this package to easily convert various time formats to milliseconds. + +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! 
+ +As always, you can run the tests using: `npm test` diff --git a/node_modules/needle/package.json b/node_modules/needle/package.json new file mode 100644 index 00000000..0ac9f5d6 --- /dev/null +++ b/node_modules/needle/package.json @@ -0,0 +1,70 @@ +{ + "name": "needle", + "version": "2.4.0", + "description": "The leanest and most handsome HTTP client in the Nodelands.", + "keywords": [ + "http", + "https", + "simple", + "request", + "client", + "multipart", + "upload", + "proxy", + "deflate", + "timeout", + "charset", + "iconv", + "cookie", + "redirect" + ], + "tags": [ + "http", + "https", + "simple", + "request", + "client", + "multipart", + "upload", + "proxy", + "deflate", + "timeout", + "charset", + "iconv", + "cookie", + "redirect" + ], + "author": "Tomás Pollak ", + "repository": { + "type": "git", + "url": "https://github.com/tomas/needle.git" + }, + "dependencies": { + "debug": "^3.2.6", + "iconv-lite": "^0.4.4", + "sax": "^1.2.4" + }, + "devDependencies": { + "JSONStream": "^1.3.5", + "jschardet": "^1.6.0", + "mocha": "^5.2.0", + "q": "^1.5.1", + "should": "^13.2.3", + "sinon": "^2.3.0", + "xml2js": "^0.4.19" + }, + "scripts": { + "test": "mocha test" + }, + "directories": { + "lib": "./lib" + }, + "main": "./lib/needle", + "bin": { + "needle": "./bin/needle" + }, + "license": "MIT", + "engines": { + "node": ">= 4.4.x" + } +} diff --git a/node_modules/needle/test/basic_auth_spec.js b/node_modules/needle/test/basic_auth_spec.js new file mode 100644 index 00000000..343f1db0 --- /dev/null +++ b/node_modules/needle/test/basic_auth_spec.js @@ -0,0 +1,196 @@ +var helpers = require('./helpers'), + should = require('should'), + needle = require('./../'), + server; + +var port = 7707; + +describe('Basic Auth', function() { + + before(function(done) { + server = helpers.server({ port: port }, done); + }) + + after(function(done) { + server.close(done); + }) + + ///////////////// helpers + + var get_auth = function(header) { + var token = header.split(/\s+/).pop(); + return token && Buffer.from(token, 'base64').toString().split(':'); + } + + describe('when neither username or password are passed', function() { + + it('doesnt send any Authorization headers', function(done) { + needle.get('localhost:' + port, { parse: true }, function(err, resp) { + var sent_headers = resp.body.headers; + Object.keys(sent_headers).should.not.containEql('authorization'); + done(); + }) + }) + + }) + + describe('when username is an empty string, and password is a valid string', function() { + + var opts = { username: '', password: 'foobar', parse: true }; + + it('doesnt send any Authorization headers', function(done) { + needle.get('localhost:' + port, { parse: true }, function(err, resp) { + var sent_headers = resp.body.headers; + Object.keys(sent_headers).should.not.containEql('authorization'); + done(); + }) + }) + + }); + + describe('when username is a valid string, but no username is passed', function() { + + var opts = { username: 'foobar', parse: true }; + + it('sends Authorization header', function(done) { + needle.get('localhost:' + port, opts, function(err, resp) { + var sent_headers = resp.body.headers; + Object.keys(sent_headers).should.containEql('authorization'); + done(); + }) + }) + + it('Basic Auth only includes username, without colon', function(done) { + needle.get('localhost:' + port, opts, function(err, resp) { + var sent_headers = resp.body.headers; + var auth = get_auth(sent_headers['authorization']); + auth[0].should.equal('foobar'); + auth.should.have.lengthOf(1); + done(); + }) + }) 
+ + }) + + describe('when username is a valid string, and password is null', function() { + + var opts = { username: 'foobar', password: null, parse: true }; + + it('sends Authorization header', function(done) { + needle.get('localhost:' + port, opts, function(err, resp) { + var sent_headers = resp.body.headers; + Object.keys(sent_headers).should.containEql('authorization'); + done(); + }) + }) + + it('Basic Auth only includes both username and password', function(done) { + needle.get('localhost:' + port, opts, function(err, resp) { + var sent_headers = resp.body.headers; + var auth = get_auth(sent_headers['authorization']); + auth[0].should.equal('foobar'); + auth[1].should.equal(''); + done(); + }) + }) + + }) + + describe('when username is a valid string, and password is an empty string', function() { + + var opts = { username: 'foobar', password: '', parse: true }; + + it('sends Authorization header', function(done) { + needle.get('localhost:' + port, opts, function(err, resp) { + var sent_headers = resp.body.headers; + Object.keys(sent_headers).should.containEql('authorization'); + done(); + }) + }) + + it('Basic Auth only includes both username and password', function(done) { + needle.get('localhost:' + port, opts, function(err, resp) { + var sent_headers = resp.body.headers; + var auth = get_auth(sent_headers['authorization']); + auth[0].should.equal('foobar'); + auth[1].should.equal(''); + auth.should.have.lengthOf(2); + done(); + }) + }) + + }) + + describe('when username AND password are non empty strings', function() { + + var opts = { username: 'foobar', password: 'jakub', parse: true }; + + it('sends Authorization header', function(done) { + needle.get('localhost:' + port, opts, function(err, resp) { + var sent_headers = resp.body.headers; + Object.keys(sent_headers).should.containEql('authorization'); + done(); + }) + }) + + it('Basic Auth only includes both user and password', function(done) { + needle.get('localhost:' + port, opts, function(err, resp) { + var sent_headers = resp.body.headers; + var auth = get_auth(sent_headers['authorization']); + auth[0].should.equal('foobar'); + auth[1].should.equal('jakub'); + auth.should.have.lengthOf(2); + done(); + }) + }) + + }) + + describe('URL with @ but not username/pass', function() { + it('doesnt send Authorization header', function(done) { + var url = 'localhost:' + port + '/abc/@def/xyz.zip'; + + needle.get(url, {}, function(err, resp) { + var sent_headers = resp.body.headers; + Object.keys(sent_headers).should.not.containEql('authorization'); + done(); + }) + }) + + it('sends user:pass headers if passed via options', function(done) { + var url = 'localhost:' + port + '/abc/@def/xyz.zip'; + + needle.get(url, { username: 'foo' }, function(err, resp) { + var sent_headers = resp.body.headers; + Object.keys(sent_headers).should.containEql('authorization'); + sent_headers['authorization'].should.eql('Basic Zm9v') + done(); + }) + }) + }) + + describe('when username/password are included in URL', function() { + var opts = { parse: true }; + + it('sends Authorization header', function(done) { + needle.get('foobar:jakub@localhost:' + port, opts, function(err, resp) { + var sent_headers = resp.body.headers; + Object.keys(sent_headers).should.containEql('authorization'); + done(); + }) + }) + + it('Basic Auth only includes both user and password', function(done) { + needle.get('foobar:jakub@localhost:' + port, opts, function(err, resp) { + var sent_headers = resp.body.headers; + var auth = get_auth(sent_headers['authorization']); + 
auth[0].should.equal('foobar'); + auth[1].should.equal('jakub'); + auth.should.have.lengthOf(2); + done(); + }) + }) + + }) + +}) diff --git a/node_modules/needle/test/compression_spec.js b/node_modules/needle/test/compression_spec.js new file mode 100644 index 00000000..ea5f3b19 --- /dev/null +++ b/node_modules/needle/test/compression_spec.js @@ -0,0 +1,94 @@ +var should = require('should'), + needle = require('./../'), + http = require('http'), + zlib = require('zlib'), + stream = require('stream'), + port = 11123, + server; + +describe('compression', function(){ + + require.bind(null, 'zlib').should.not.throw() + + var jsonData = '{"foo":"bar"}'; + + describe('when server supports compression', function(){ + + before(function(){ + server = http.createServer(function(req, res) { + var raw = new stream.PassThrough(); + + var acceptEncoding = req.headers['accept-encoding']; + if (!acceptEncoding) { + acceptEncoding = ''; + } + + if (acceptEncoding.match(/\bdeflate\b/)) { + res.setHeader('Content-Encoding', 'deflate'); + raw.pipe(zlib.createDeflate()).pipe(res); + } else if (acceptEncoding.match(/\bgzip\b/)) { + res.setHeader('Content-Encoding', 'gzip'); + raw.pipe(zlib.createGzip()).pipe(res); + } else { + raw.pipe(res); + } + + res.setHeader('Content-Type', 'application/json') + if (req.headers['with-bad']) { + res.end('foo'); // end, no deflate data + } else { + raw.end(jsonData) + } + + }) + + server.listen(port); + }); + + after(function(done){ + server.close(done); + }) + + describe('and client requests no compression', function() { + it('should have the body decompressed', function(done){ + needle.get('localhost:' + port, function(err, response, body){ + should.ifError(err); + body.should.have.property('foo', 'bar'); + response.bytes.should.equal(jsonData.length); + done(); + }) + }) + }) + + describe('and client requests gzip compression', function() { + it('should have the body decompressed', function(done){ + needle.get('localhost:' + port, {headers: {'Accept-Encoding': 'gzip'}}, function(err, response, body){ + should.ifError(err); + body.should.have.property('foo', 'bar'); + response.bytes.should.not.equal(jsonData.length); + done(); + }) + }) + }) + + describe('and client requests deflate compression', function() { + it('should have the body decompressed', function(done){ + needle.get('localhost:' + port, {headers: {'Accept-Encoding': 'deflate'}}, function(err, response, body){ + should.ifError(err); + body.should.have.property('foo', 'bar'); + response.bytes.should.not.equal(jsonData.length); + done(); + }) + }) + + it('should rethrow errors from decompressors', function(done){ + needle.get('localhost:' + port, {headers: {'Accept-Encoding': 'deflate', 'With-Bad': 'true'}}, function(err, response, body) { + should.exist(err); + err.message.should.equal("incorrect header check"); + err.code.should.equal("Z_DATA_ERROR") + done(); + }) + }) + }) + }) +}) diff --git a/node_modules/needle/test/cookies_spec.js b/node_modules/needle/test/cookies_spec.js new file mode 100644 index 00000000..04b5f11b --- /dev/null +++ b/node_modules/needle/test/cookies_spec.js @@ -0,0 +1,305 @@ +var needle = require('../'), + cookies = require('../lib/cookies'), + sinon = require('sinon'), + http = require('http'), + should = require('should'), + assert = require('assert'); + +var WEIRD_COOKIE_NAME = 'wc', + BASE64_COOKIE_NAME = 'bc', + FORBIDDEN_COOKIE_NAME = 'fc', + NUMBER_COOKIE_NAME = 'nc'; + +var WEIRD_COOKIE_VALUE = 
'!\'*+#()&-./0123456789:<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~', + BASE64_COOKIE_VALUE = 'Y29va2llCg==', + FORBIDDEN_COOKIE_VALUE = ' ;"\\,', + NUMBER_COOKIE_VALUE = 12354342; + +var TEST_HOST = 'localhost', + NO_COOKIES_TEST_PORT = 11112, + ALL_COOKIES_TEST_PORT = 11113; + +describe('cookies', function() { + + var setCookieHeader, headers, server, opts; + + function decode(str) { + return decodeURIComponent(str); + } + + function encode(str) { + str = str.toString().replace(/[\x00-\x1F\x7F]/g, encodeURIComponent); + return str.replace(/[\s\"\,;\\%]/g, encodeURIComponent); + } + + before(function() { + setCookieHeader = [ + WEIRD_COOKIE_NAME + '=' + encode(WEIRD_COOKIE_VALUE) + ';', + BASE64_COOKIE_NAME + '=' + encode(BASE64_COOKIE_VALUE) + ';', + FORBIDDEN_COOKIE_NAME + '=' + encode(FORBIDDEN_COOKIE_VALUE) + ';', + NUMBER_COOKIE_NAME + '=' + encode(NUMBER_COOKIE_VALUE) + ';' + ]; + }); + + before(function(done) { + serverAllCookies = http.createServer(function(req, res) { + res.setHeader('Content-Type', 'text/html'); + res.setHeader('Set-Cookie', setCookieHeader); + res.end('200'); + }).listen(ALL_COOKIES_TEST_PORT, TEST_HOST, done); + }); + + after(function(done) { + serverAllCookies.close(done); + }); + + describe('with default options', function() { + it('no cookie header is set on request', function(done) { + needle.get( + TEST_HOST + ':' + ALL_COOKIES_TEST_PORT, function(err, response) { + assert(!response.req._headers.cookie); + done(); + }); + }); + }); + + describe('if response does not contain cookies', function() { + before(function(done) { + serverNoCookies = http.createServer(function(req, res) { + res.setHeader('Content-Type', 'text/html'); + res.end('200'); + }).listen(NO_COOKIES_TEST_PORT, TEST_HOST, done); + }); + + it('response.cookies is undefined', function(done) { + needle.get( + TEST_HOST + ':' + NO_COOKIES_TEST_PORT, function(error, response) { + assert(!response.cookies); + done(); + }); + }); + + after(function(done) { + serverNoCookies.close(done); + }); + }); + + describe('if response contains cookies', function() { + + it('puts them on resp.cookies', function(done) { + needle.get( + TEST_HOST + ':' + ALL_COOKIES_TEST_PORT, function(error, response) { + response.should.have.property('cookies'); + done(); + }); + }); + + it('parses them as a object', function(done) { + needle.get( + TEST_HOST + ':' + ALL_COOKIES_TEST_PORT, function(error, response) { + response.cookies.should.be.an.instanceOf(Object) + .and.have.property(WEIRD_COOKIE_NAME); + response.cookies.should.have.property(BASE64_COOKIE_NAME); + response.cookies.should.have.property(FORBIDDEN_COOKIE_NAME); + response.cookies.should.have.property(NUMBER_COOKIE_NAME); + done(); + }); + }); + + it('must decode it', function(done) { + needle.get( + TEST_HOST + ':' + ALL_COOKIES_TEST_PORT, function(error, response) { + response.cookies.wc.should.be.eql(WEIRD_COOKIE_VALUE); + response.cookies.bc.should.be.eql(BASE64_COOKIE_VALUE); + response.cookies.fc.should.be.eql(FORBIDDEN_COOKIE_VALUE); + response.cookies.nc.should.be.eql(NUMBER_COOKIE_VALUE.toString()); + done(); + }); + }); + + describe('when a cookie value is invalid', function() { + + before(function() { + setCookieHeader = [ + 'geo_city=%D1%E0%ED%EA%F2-%CF%E5%F2%E5%F0%E1%F3%F0%E3' + ]; + }) + + it('doesnt blow up', function(done) { + needle.get(TEST_HOST + ':' + ALL_COOKIES_TEST_PORT, function(error, response) { + should.not.exist(error) + var whatever = 
'efbfbdefbfbdefbfbdefbfbdefbfbd2defbfbdefbfbdefbfbdefbfbdefbfbdefbfbdefbfbdefbfbdefbfbd'; + Buffer.from(response.cookies.geo_city).toString('hex').should.eql(whatever) + done(); + }); + }) + + }) + + describe('and response is a redirect', function() { + + var redirectServer, testPort = 22222; + + var responseCookies = [ + [ // first req + WEIRD_COOKIE_NAME + '=' + encode(WEIRD_COOKIE_VALUE) + ';', + BASE64_COOKIE_NAME + '=' + encode(BASE64_COOKIE_VALUE) + ';', + 'FOO=123;' + ], [ // second req + FORBIDDEN_COOKIE_NAME + '=' + encode(FORBIDDEN_COOKIE_VALUE) + ';', + NUMBER_COOKIE_NAME + '=' + encode(NUMBER_COOKIE_VALUE) + ';' + ], [ // third red + 'FOO=BAR;' + ] + ] + + before(function() { + redirectServer = http.createServer(function(req, res) { + var number = parseInt(req.url.replace('/', '')); + var nextUrl = 'http://' + TEST_HOST + ':' + testPort + '/' + (number + 1); + + if (responseCookies[number]) { // got cookies + res.statusCode = 302; + res.setHeader('Set-Cookie', responseCookies[number]); + res.setHeader('Location', nextUrl); + } else if (number == 3) { + res.statusCode = 302; // redirect but without cookies + res.setHeader('Location', nextUrl); + } + + res.end('OK'); + }).listen(22222, TEST_HOST); + }); + + after(function(done) { + redirectServer.close(done); + }) + + describe('and follow_set_cookies is false', function() { + + var opts = { + follow_set_cookies: false, + follow_max: 4 + }; + + it('no cookie header set on redirection request', function(done) { + var spy = sinon.spy(cookies, 'write'); + + needle.get(TEST_HOST + ':' + testPort + '/0', opts, function(err, resp) { + spy.callCount.should.eql(0); + done(); + }); + }); + }); + + describe('and follow_set_cookies is true', function() { + var opts = { + follow_set_cookies: true, + follow_max: 4 + }; + + it('should have all the cookies', function(done) { + needle.get(TEST_HOST + ':' + testPort + '/0', opts, function(err, resp) { + resp.cookies.should.have.property(WEIRD_COOKIE_NAME); + resp.cookies.should.have.property(BASE64_COOKIE_NAME); + resp.cookies.should.have.property(FORBIDDEN_COOKIE_NAME); + resp.cookies.should.have.property(NUMBER_COOKIE_NAME); + resp.cookies.should.have.property('FOO'); + resp.cookies.FOO.should.eql('BAR'); // should overwrite previous one + done(); + }); + }); + }); + }); + + describe('with parse_cookies = false', function() { + it('does not parse them', function(done) { + needle.get( + TEST_HOST + ':' + ALL_COOKIES_TEST_PORT, { parse_cookies: false }, function(error, response) { + assert(!response.cookies); + done(); + }); + }); + }); + }); + + describe('if request contains cookie header', function() { + var opts = { + cookies: {} + }; + + before(function() { + opts.cookies[WEIRD_COOKIE_NAME] = WEIRD_COOKIE_VALUE; + opts.cookies[BASE64_COOKIE_NAME] = BASE64_COOKIE_VALUE; + opts.cookies[FORBIDDEN_COOKIE_NAME] = FORBIDDEN_COOKIE_VALUE; + opts.cookies[NUMBER_COOKIE_NAME] = NUMBER_COOKIE_VALUE; + }); + + it('must be a valid cookie string', function(done) { + var COOKIE_PAIR = /^([^=\s]+)\s*=\s*("?)\s*(.*)\s*\2\s*$/; + + var full_header = [ + WEIRD_COOKIE_NAME + '=' + WEIRD_COOKIE_VALUE, + BASE64_COOKIE_NAME + '=' + BASE64_COOKIE_VALUE, + FORBIDDEN_COOKIE_NAME + '=' + encode(FORBIDDEN_COOKIE_VALUE), + NUMBER_COOKIE_NAME + '=' + NUMBER_COOKIE_VALUE + ].join('; ') + + needle.get(TEST_HOST + ':' + ALL_COOKIES_TEST_PORT, opts, function(error, response) { + var cookieString = response.req._headers.cookie; + cookieString.should.be.type('string'); + + cookieString.split(/\s*;\s*/).forEach(function(pair) { + 
COOKIE_PAIR.test(pair).should.be.exactly(true); + }); + + cookieString.should.be.exactly(full_header); + done(); + }); + }); + + it('dont have to encode allowed characters', function(done) { + var COOKIE_PAIR = /^([^=\s]+)\s*=\s*("?)\s*(.*)\s*\2\s*$/, + KEY_INDEX = 1, + VALUE_INEX = 3; + + needle.get(TEST_HOST + ':' + ALL_COOKIES_TEST_PORT, opts, function(error, response) { + var cookieObj = {}, + cookieString = response.req._headers.cookie; + + cookieString.split(/\s*;\s*/).forEach(function(str) { + var pair = COOKIE_PAIR.exec(str); + cookieObj[pair[KEY_INDEX]] = pair[VALUE_INEX]; + }); + + cookieObj[WEIRD_COOKIE_NAME].should.be.exactly(WEIRD_COOKIE_VALUE); + cookieObj[BASE64_COOKIE_NAME].should.be.exactly(BASE64_COOKIE_VALUE); + done(); + }); + }); + + it('must encode forbidden characters', function(done) { + var COOKIE_PAIR = /^([^=\s]+)\s*=\s*("?)\s*(.*)\s*\2\s*$/, + KEY_INDEX = 1, + VALUE_INEX = 3; + + needle.get(TEST_HOST + ':' + ALL_COOKIES_TEST_PORT, opts, function(error, response) { + var cookieObj = {}, + cookieString = response.req._headers.cookie; + + cookieString.split(/\s*;\s*/).forEach(function(str) { + var pair = COOKIE_PAIR.exec(str); + cookieObj[pair[KEY_INDEX]] = pair[VALUE_INEX]; + }); + + cookieObj[FORBIDDEN_COOKIE_NAME].should.not.be.eql( + FORBIDDEN_COOKIE_VALUE); + cookieObj[FORBIDDEN_COOKIE_NAME].should.be.exactly( + encode(FORBIDDEN_COOKIE_VALUE)); + cookieObj[FORBIDDEN_COOKIE_NAME].should.be.exactly( + encodeURIComponent(FORBIDDEN_COOKIE_VALUE)); + done(); + }); + }); + }); +}); diff --git a/node_modules/needle/test/decoder_spec.js b/node_modules/needle/test/decoder_spec.js new file mode 100644 index 00000000..5225f6fd --- /dev/null +++ b/node_modules/needle/test/decoder_spec.js @@ -0,0 +1,86 @@ +var should = require('should'), + needle = require('./../'), + Q = require('q'), + chardet = require('jschardet'); + +describe('character encoding', function() { + + var url; + this.timeout(5000); + + describe('test A', function() { + + before(function() { + url = 'http://www.nina.jp/server/slackware/webapp/tomcat_charset.html'; + }) + + describe('with decode = false', function() { + + it('does not decode', function(done) { + + needle.get(url, { decode: false }, function(err, resp) { + resp.body.should.be.a.String; + chardet.detect(resp.body).encoding.should.eql('windows-1252'); + resp.body.indexOf('EUCを使う').should.eql(-1); + done(); + }) + + }) + + }) + + describe('with decode = true', function() { + + it('decodes', function(done) { + + needle.get(url, { decode: true }, function(err, resp) { + resp.body.should.be.a.String; + chardet.detect(resp.body).encoding.should.eql('ascii'); + resp.body.indexOf('EUCを使う').should.not.eql(-1); + done(); + }) + + }) + + }) + + }) + + describe('test B', function() { + + it('encodes to UTF-8', function(done) { + + // Our Needle wrapper that requests a chinese website. + var task = Q.nbind(needle.get, needle, 'http://www.chinesetop100.com/'); + + // Different instantiations of this task + var tasks = [Q.fcall(task, {decode: true}), + Q.fcall(task, {decode: false})]; + + var results = tasks.map(function(task) { + return task.then(function(obj) { + return obj[0].body; + }); + }); + + // Execute all requests concurrently + Q.all(results).done(function(bodies) { + + var charsets = [ + chardet.detect(bodies[0]).encoding, + chardet.detect(bodies[1]).encoding, + ] + + // We wanted to decode our first stream. + charsets[0].should.equal('ascii'); + bodies[0].indexOf('全球中文网站前二十强').should.not.equal(-1); + + // But not our second stream. 
+ charsets[1].should.equal('windows-1252'); + bodies[1].indexOf('全球中文网站前二十强').should.equal(-1); + + done(); + }); + }) + }) +}) diff --git a/node_modules/needle/test/errors_spec.js b/node_modules/needle/test/errors_spec.js new file mode 100644 index 00000000..2f488f5f --- /dev/null +++ b/node_modules/needle/test/errors_spec.js @@ -0,0 +1,286 @@ +var needle = require('../'), + sinon = require('sinon'), + should = require('should'), + http = require('http'), + Emitter = require('events').EventEmitter, + helpers = require('./helpers'); + +var get_catch = function(url, opts) { + var err; + try { + needle.get(url, opts); + } catch(e) { + err = e; + } + return err; +} + +describe('errors', function() { + + after(function(done) { + setTimeout(done, 100) + }) + + describe('when host does not exist', function() { + + var url = 'http://unexistinghost/foo'; + + describe('with callback', function() { + + it('does not throw', function() { + var ex = get_catch(url); + should.not.exist(ex); + }) + + it('callbacks an error', function(done) { + needle.get(url, function(err) { + err.should.be.a.Error; + done(); + }) + }) + + it('error should be ENOTFOUND or EADDRINFO or EAI_AGAIN', function(done) { + needle.get(url, function(err) { + err.code.should.match(/ENOTFOUND|EADDRINFO|EAI_AGAIN/) + done(); + }) + }) + + it('does not callback a response', function(done) { + needle.get(url, function(err, resp) { + should.not.exist(resp); + done(); + }) + }) + + it('does not emit an error event', function(done) { + var emitted = false; + var req = needle.get(url, function(err, resp) { }) + + req.on('error', function() { + emitted = true; + }) + + setTimeout(function() { + emitted.should.eql(false); + done(); + }, 100); + }) + + }) + + describe('without callback', function() { + + it('does not throw', function() { + var ex = get_catch(url); + should.not.exist(ex); + }) + + it('emits end event once, with error', function(done) { + var callcount = 0, + stream = needle.get(url); + + stream.on('done', function(err) { + callcount++; + }) + + setTimeout(function() { + callcount.should.equal(1); + done(); + }, 200) + }) + + it('error should be ENOTFOUND or EADDRINFO or EAI_AGAIN', function(done) { + var errorific, + stream = needle.get(url); + + stream.on('done', function(err) { + errorific = err; + }) + + setTimeout(function() { + should.exist(errorific); + errorific.code.should.match(/ENOTFOUND|EADDRINFO|EAI_AGAIN/) + done(); + }, 200) + }) + + it('does not emit a readable event', function(done) { + var called = false, + stream = needle.get(url); + + stream.on('readable', function() { + called = true; + }) + + setTimeout(function() { + called.should.be.false; + done(); + }, 50) + }) + + it('does not emit an error event', function(done) { + var emitted = false, + req = needle.get(url); + + req.on('error', function() { + emitted = true; + }) + + setTimeout(function() { + emitted.should.eql(false); + done(); + }, 100); + }) + + }) + + }) + + describe('when request times out waiting for response', function() { + + var server, + url = 'http://localhost:3333/foo'; + + var send_request = function(cb) { + return needle.get(url, { response_timeout: 200 }, cb); + } + + before(function() { + server = helpers.server({ port: 3333, wait: 1000 }); + }) + + after(function() { + server.close(); + }) + + describe('with callback', function() { + + it('aborts the request', function(done) { + + var time = new Date(); + + send_request(function(err) { + var timediff = (new Date() - time); + timediff.should.be.within(200, 300); + done(); + }) + + 
}) + + it('callbacks an error', function(done) { + send_request(function(err) { + err.should.be.a.Error; + done(); + }) + }) + + it('error should be ECONNRESET', function(done) { + send_request(function(err) { + err.code.should.equal('ECONNRESET') + done(); + }) + }) + + it('does not callback a response', function(done) { + send_request(function(err, resp) { + should.not.exist(resp); + done(); + }) + }) + + it('does not emit an error event', function(done) { + var emitted = false; + + var req = send_request(function(err, resp) { + should.not.exist(resp); + }) + + req.on('error', function() { + emitted = true; + }) + + setTimeout(function() { + emitted.should.eql(false); + done(); + }, 350); + }) + + }) + + describe('without callback', function() { + + it('emits done event once, with error', function(done) { + var called = 0, + stream = send_request(); + + stream.on('done', function(err) { + called++; + }) + + setTimeout(function() { + called.should.equal(1); + done(); + }, 250) + }) + + it('aborts the request', function(done) { + + var time = new Date(); + var stream = send_request(); + + stream.on('done', function(err) { + var timediff = (new Date() - time); + timediff.should.be.within(200, 300); + done(); + }) + + }) + + it('error should be ECONNRESET', function(done) { + var error, + stream = send_request(); + + stream.on('done', function(err) { + error = err; + }) + + setTimeout(function() { + error.code.should.equal('ECONNRESET') + done(); + }, 250) + }) + + it('does not emit a readable event', function(done) { + var called = false, + stream = send_request(); + + stream.on('readable', function() { + called = true; + }) + + setTimeout(function() { + called.should.be.false; + done(); + }, 250) + }) + + it('does not emit an error event', function(done) { + var emitted = false; + var req = send_request(); + + req.on('error', function() { + emitted = true; + }) + + setTimeout(function() { + emitted.should.eql(false); + done(); + }, 100); + }) + + }) + + }) + +}) diff --git a/node_modules/needle/test/headers_spec.js b/node_modules/needle/test/headers_spec.js new file mode 100644 index 00000000..e6693ef7 --- /dev/null +++ b/node_modules/needle/test/headers_spec.js @@ -0,0 +1,198 @@ +var http = require('http'), + helpers = require('./helpers'), + should = require('should'); + +var port = 54321; + +describe('request headers', function() { + + var needle, + server, + existing_sockets, + original_defaultMaxSockets; + + before(function(done) { + setTimeout(function() { + existing_sockets = get_active_sockets().length; + server = helpers.server({ port: port }, done); + }, 100); + }) + + after(function(done) { + server.close(done) + }) + + function send_request(opts, cb) { + needle.get('http://localhost:' + port, opts, cb); + } + + function get_active_sockets() { + var handles = process._getActiveHandles(); + + // only return the ones that have a .end() function (like a socket) + return handles.filter(function(el) { + if (el.constructor.name.toString() == 'Socket') { + return el.destroyed !== true; + } + }) + } + + describe('old node versions (<0.11.4) with persistent keep-alive connections', function() { + + before(function() { + delete require.cache[require.resolve('..')] // in case it was already loaded + original_defaultMaxSockets = http.Agent.defaultMaxSockets; + http.Agent.defaultMaxSockets = 5; + needle = require('..'); + }) + + after(function() { + http.Agent.defaultMaxSockets = original_defaultMaxSockets; + delete require.cache[require.resolve('..')] + }) + + describe('default options', 
function() { + + it('sends a Connection: close header', function(done) { + send_request({}, function(err, resp) { + resp.body.headers['connection'].should.eql('close'); + done(); + }) + }) + + it('no open sockets remain after request', function(done) { + send_request({}, function(err, resp) { + get_active_sockets().length.should.eql(existing_sockets); + done(); + }); + }) + + }) + + describe('passing connection: close', function() { + + it('sends a Connection: close header', function(done) { + send_request({ connection: 'close' }, function(err, resp) { + resp.body.headers['connection'].should.eql('close'); + done(); + }) + }) + + it('no open sockets remain after request', function(done) { + send_request({ connection: 'close' }, function(err, resp) { + get_active_sockets().length.should.eql(existing_sockets); + done(); + }); + }) + + }) + + describe('passing connection: keep-alive', function() { + + it('sends a Connection: keep-alive header (using options.headers.connection)', function(done) { + send_request({ headers: { connection: 'keep-alive' }}, function(err, resp) { + resp.body.headers['connection'].should.eql('keep-alive'); + done(); + }) + }) + + it('sends a Connection: keep-alive header (using options.connection)', function(done) { + send_request({ connection: 'keep-alive' }, function(err, resp) { + resp.body.headers['connection'].should.eql('keep-alive'); + done(); + }) + }) + + it('one open socket remain after request', function(done) { + send_request({ connection: 'keep-alive' }, function(err, resp) { + get_active_sockets().length.should.eql(existing_sockets + 1); + done(); + }); + }) + + }) + + }) + + describe('new node versions with smarter connection disposing', function() { + + before(function() { + delete require.cache[require.resolve('..')] + original_defaultMaxSockets = http.Agent.defaultMaxSockets; + http.Agent.defaultMaxSockets = Infinity; + needle = require('..'); + }) + + after(function() { + http.Agent.defaultMaxSockets = original_defaultMaxSockets; + delete require.cache[require.resolve('..')] + }) + + describe('default options', function() { + + // TODO: + // this is weird. 
by default, new node versions set a 'close' header + // while older versions set a keep-alive header + + it.skip('sets a Connection header', function(done) { + send_request({}, function(err, resp) { + // should.not.exist(resp.body.headers['connection']); + // done(); + }) + }) + + it.skip('one open sockets remain after request', function(done) { + send_request({}, function(err, resp) { + // get_active_sockets().length.should.eql(1); + // done(); + }); + }) + + }) + + describe('passing connection: close', function() { + + it('sends a Connection: close header', function(done) { + send_request({ connection: 'close' }, function(err, resp) { + resp.body.headers['connection'].should.eql('close'); + done(); + }) + }) + + it('no open sockets remain after request', function(done) { + send_request({ connection: 'close' }, function(err, resp) { + get_active_sockets().length.should.eql(existing_sockets); + done(); + }); + }) + + }) + + describe('passing connection: keep-alive', function() { + + it('sends a Connection: keep-alive header (using options.headers.connection)', function(done) { + send_request({ headers: { connection: 'keep-alive' }}, function(err, resp) { + resp.body.headers['connection'].should.eql('keep-alive'); + done(); + }) + }) + + it('sends a Connection: keep-alive header (using options.connection)', function(done) { + send_request({ connection: 'keep-alive' }, function(err, resp) { + resp.body.headers['connection'].should.eql('keep-alive'); + done(); + }) + }) + + it('one open socket remain after request', function(done) { + send_request({ connection: 'keep-alive' }, function(err, resp) { + get_active_sockets().length.should.eql(existing_sockets + 1); + done(); + }); + }) + + }) + + }) + +}) diff --git a/node_modules/needle/test/helpers.js b/node_modules/needle/test/helpers.js new file mode 100644 index 00000000..a85f5487 --- /dev/null +++ b/node_modules/needle/test/helpers.js @@ -0,0 +1,72 @@ +var fs = require('fs'); + +var protocols = { + http : require('http'), + https : require('https') +} + +var keys = { + cert : fs.readFileSync(__dirname + '/keys/ssl.cert'), + key : fs.readFileSync(__dirname + '/keys/ssl.key') +} + +var helpers = {}; + +helpers.server = function(opts, cb) { + + var defaults = { + code : 200, + headers : {'Content-Type': 'application/json'} + } + + var mirror_response = function(req) { + return JSON.stringify({ + headers: req.headers, + body: req.body + }) + } + + var get = function(what) { + if (!opts[what]) + return defaults[what]; + + if (typeof opts[what] == 'function') + return opts[what](); // set them at runtime + else + return opts[what]; + } + + var finish = function(req, res) { + res.writeHead(get('code'), get('headers')); + res.end(opts.response || mirror_response(req)); + } + + var handler = function(req, res) { + + req.setEncoding('utf8'); // get as string + req.body = ''; + req.on('data', function(str) { req.body += str }) + req.socket.on('error', function(e) { + // res.writeHead(500, {'Content-Type': 'text/plain'}); + // res.end('Error: ' + e.message); + }) + + setTimeout(function(){ + finish(req, res); + }, opts.wait || 0); + + }; + + var protocol = opts.protocol || 'http'; + var server; + + if (protocol == 'https') + server = protocols[protocol].createServer(keys, handler); + else + server = protocols[protocol].createServer(handler); + + server.listen(opts.port, cb); + return server; +} + +module.exports = helpers; \ No newline at end of file diff --git a/node_modules/needle/test/long_string_spec.js 
b/node_modules/needle/test/long_string_spec.js new file mode 100644 index 00000000..4baa4fab --- /dev/null +++ b/node_modules/needle/test/long_string_spec.js @@ -0,0 +1,34 @@ +var needle = require('../'), + should = require('should'); + +describe('when posting a very long string', function() { + + this.timeout(20000); + + function get_string(length) { + var str = ''; + for (var i = 0; i < length; i++) { + str += 'x'; + } + return str; + } + + it("shouldn't throw an EPIPE error out of nowhere", function(done) { + var error; + + function finished() { + setTimeout(function() { + should.not.exist(error); + done(); + }, 300); + } + + try { + needle.post('https://google.com', { data: get_string(Math.pow(2, 20)) }, finished) + } catch(e) { + error = e; + } + + }) + +}) diff --git a/node_modules/needle/test/output_spec.js b/node_modules/needle/test/output_spec.js new file mode 100644 index 00000000..14fc5bb0 --- /dev/null +++ b/node_modules/needle/test/output_spec.js @@ -0,0 +1,254 @@ +var should = require('should'), + needle = require('./../'), + http = require('http'), + sinon = require('sinon'), + stream = require('stream'), + fs = require('fs'), + port = 11111, + server; + +describe('with output option', function() { + + var server, handler, file = '/tmp/foobar.out'; + + function send_request_cb(where, cb) { + var url = 'http://localhost:' + port + '/whatever.file'; + return needle.get(url, { output: where }, cb); + } + + function send_request_stream(where, cb) { + var url = 'http://localhost:' + port + '/whatever.file'; + var stream = needle.get(url, { output: where }); + stream.on('end', cb); + } + + // this will only work in UNICES + function get_open_file_descriptors() { + var list = fs.readdirSync('/proc/self/fd'); + return list.length; + } + + var send_request = send_request_cb; + + before(function(){ + server = http.createServer(function(req, res) { + handler(req, res); + }).listen(port); + }); + + after(function() { + server.close(); + }) + + beforeEach(function() { + try { fs.unlinkSync(file) } catch(e) { }; + }) + + describe('and a 404 response', function() { + + before(function() { + handler = function(req, res) { + res.writeHead(404, {'Content-Type': 'text/plain' }); + res.end(); + } + }) + + it('doesnt attempt to write a file', function(done) { + var spy = sinon.spy(fs, 'createWriteStream'); + send_request(file, function(err, resp) { + resp.statusCode.should.eql(404); + spy.called.should.eql(false); + spy.restore(); + done(); + }) + }) + + it('doesnt actually write a file', function(done) { + send_request(file, function(err, resp) { + resp.statusCode.should.eql(404); + fs.existsSync(file).should.eql(false); + done(); + }) + }) + + }) + + describe('and a 200 response', function() { + + describe('for an empty response', function() { + + before(function() { + handler = function(req, res) { + res.writeHead(200, { 'Content-Type': 'text/plain' }); + res.end(); + } + }) + + it('uses a writableStream', function(done) { + var spy = sinon.spy(fs, 'createWriteStream'); + send_request(file, function(err, resp) { + resp.statusCode.should.eql(200); + spy.called.should.eql(true); + spy.restore(); + done(); + }) + }) + + it('writes a file', function(done) { + fs.existsSync(file).should.eql(false); + send_request(file, function(err, resp) { + fs.existsSync(file).should.eql(true); + done(); + }) + }) + + it('file is zero bytes in length', function(done) { + send_request(file, function(err, resp) { + fs.statSync(file).size.should.equal(0); + done(); + }) + }) + + if (process.platform != 'win32') { + 
it('closes the file descriptor', function(done) { + var open_descriptors = get_open_file_descriptors(); + send_request(file + Math.random(), function(err, resp) { + var current_descriptors = get_open_file_descriptors(); + open_descriptors.should.eql(current_descriptors); + done() + }) + }) + } + + }) + + describe('for a JSON response', function() { + + before(function() { + handler = function(req, res) { + res.writeHead(200, { 'Content-Type': 'application/javascript' }); + res.end(JSON.stringify({foo: 'bar'})); + } + }) + + it('uses a writableStream', function(done) { + var spy = sinon.spy(fs, 'createWriteStream'); + send_request(file, function(err, resp) { + resp.statusCode.should.eql(200); + spy.called.should.eql(true); + spy.restore(); + done(); + }) + }) + + it('writes a file', function(done) { + fs.existsSync(file).should.eql(false); + send_request(file, function(err, resp) { + fs.existsSync(file).should.eql(true); + done(); + }) + }) + + it('file size equals response length', function(done) { + send_request(file, function(err, resp) { + fs.statSync(file).size.should.equal(resp.bytes); + done(); + }) + }) + + it('response pipeline is honoured (JSON is decoded by default)', function(done) { + send_request_stream(file, function(err, resp) { + // we need to wait a bit since writing to config.output + // happens independently of needle's callback logic. + setTimeout(function() { + fs.readFileSync(file).toString().should.eql('{\"foo\":\"bar\"}'); + done(); + }, 20); + }) + }) + + it('closes the file descriptor', function(done) { + var open_descriptors = get_open_file_descriptors(); + send_request(file + Math.random(), function(err, resp) { + var current_descriptors = get_open_file_descriptors(); + open_descriptors.should.eql(current_descriptors); + done() + }) + }) + + }) + + describe('for a binary file', function() { + + var pixel = Buffer.from("base64,R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs", "base64"); + + before(function() { + handler = function(req, res) { + res.writeHead(200, { 'Content-Type': 'application/octet-stream', 'Transfer-Encoding': 'chunked' }); + res.write(pixel.slice(0, 10)); + res.write(pixel.slice(10, 20)); + res.write(pixel.slice(20, 30)); + res.write(pixel.slice(30)); + res.end(); + } + }) + + it('uses a writableStream', function(done) { + var spy = sinon.spy(fs, 'createWriteStream'); + send_request(file, function(err, resp) { + resp.statusCode.should.eql(200); + spy.called.should.eql(true); + spy.restore(); + done(); + }) + }) + + it('writes a file', function(done) { + fs.existsSync(file).should.eql(false); + send_request(file, function(err, resp) { + fs.existsSync(file).should.eql(true); + done(); + }) + }) + + it('file size equals response length', function(done) { + send_request(file, function(err, resp) { + fs.statSync(file).size.should.equal(resp.bytes); + done(); + }) + }) + + it('file is equal to original buffer', function(done) { + send_request(file, function(err, resp) { + // we need to wait a bit since writing to config.output + // happens independently of needle's callback logic. 
+ setTimeout(function() { + fs.readFileSync(file).should.eql(pixel); + done(); + }, 20); + }) + }) + + it('returns the data in resp.body too', function(done) { + send_request(file, function(err, resp) { + resp.body.should.eql(pixel); + done(); + }) + }) + + if (process.platform != 'win32') { + it('closes the file descriptor', function(done) { + var open_descriptors = get_open_file_descriptors(); + send_request(file + Math.random(), function(err, resp) { + var current_descriptors = get_open_file_descriptors(); + open_descriptors.should.eql(current_descriptors); + done() + }) + }) + } + + }) + + }) + +}) diff --git a/node_modules/needle/test/parsing_spec.js b/node_modules/needle/test/parsing_spec.js new file mode 100644 index 00000000..406a292d --- /dev/null +++ b/node_modules/needle/test/parsing_spec.js @@ -0,0 +1,494 @@ +var should = require('should'), + needle = require('./../'), + http = require('http'), + port = 11111, + server; + +describe('parsing', function(){ + + describe('when response is an JSON string', function(){ + + var json_string = '{"foo":"bar"}'; + + before(function(done){ + server = http.createServer(function(req, res) { + res.setHeader('Content-Type', 'application/json'); + res.end(json_string); + }).listen(port, done); + }); + + after(function(done){ + server.close(done); + }) + + describe('and parse option is not passed', function() { + + describe('with default parse_response', function() { + + before(function() { + needle.defaults().parse_response.should.eql('all') + }) + + it('should return object', function(done){ + needle.get('localhost:' + port, function(err, response, body){ + should.ifError(err); + body.should.have.property('foo', 'bar'); + done(); + }) + }) + + }) + + describe('and default parse_response is set to false', function() { + + it('does NOT return object when disabled using .defaults', function(done){ + needle.defaults({ parse_response: false }) + + needle.get('localhost:' + port, function(err, response, body) { + should.not.exist(err); + body.should.be.an.instanceof(Buffer) + body.toString().should.eql('{"foo":"bar"}'); + + needle.defaults({ parse_response: 'all' }); + done(); + }) + }) + + + }) + + }) + + describe('and parse option is true', function() { + + describe('and JSON is valid', function() { + + it('should return object', function(done) { + needle.get('localhost:' + port, { parse: true }, function(err, response, body){ + should.not.exist(err); + body.should.have.property('foo', 'bar') + done(); + }) + }) + + it('should have a .parser = json property', function(done) { + needle.get('localhost:' + port, { parse: true }, function(err, resp) { + should.not.exist(err); + resp.parser.should.eql('json'); + done(); + }) + }) + + }); + + describe('and response is empty', function() { + + var old_json_string; + + before(function() { + old_json_string = json_string; + json_string = ""; + }); + + after(function() { + json_string = old_json_string; + }); + + it('should return an empty string', function(done) { + needle.get('localhost:' + port, { parse: true }, function(err, resp) { + should.not.exist(err); + resp.body.should.equal(''); + done(); + }) + }) + + }) + + describe('and JSON is invalid', function() { + + var old_json_string; + + before(function() { + old_json_string = json_string; + json_string = "this is not going to work"; + }); + + after(function() { + json_string = old_json_string; + }); + + it('does not throw', function(done) { + (function(){ + needle.get('localhost:' + port, { parse: true }, done); + }).should.not.throw(); + }); + + 
it('does NOT return object', function(done) { + needle.get('localhost:' + port, { parse: true }, function(err, response, body) { + should.not.exist(err); + body.should.be.a.String; + body.toString().should.eql('this is not going to work'); + done(); + }) + }) + + }); + + }) + + describe('and parse option is false', function() { + + it('does NOT return object', function(done){ + needle.get('localhost:' + port, { parse: false }, function(err, response, body) { + should.not.exist(err); + body.should.be.an.instanceof(Buffer) + body.toString().should.eql('{"foo":"bar"}'); + done(); + }) + }) + + it('should NOT have a .parser = json property', function(done) { + needle.get('localhost:' + port, { parse: false }, function(err, resp) { + should.not.exist(err); + should.not.exist(resp.parser); + done(); + }) + }) + + }) + + describe('and parse option is "xml"', function() { + + it('does NOT return object', function(done){ + needle.get('localhost:' + port, { parse: 'xml' }, function(err, response, body) { + should.not.exist(err); + body.should.be.an.instanceof(Buffer) + body.toString().should.eql('{"foo":"bar"}'); + done(); + }) + }) + + it('should NOT have a .parser = json property', function(done) { + needle.get('localhost:' + port, { parse: 'xml' }, function(err, resp) { + should.not.exist(err); + should.not.exist(resp.parser); + done(); + }) + }) + + }) + + }); + + describe('when response is JSON \'false\'', function(){ + + var json_string = 'false'; + + before(function(done){ + server = http.createServer(function(req, res) { + res.setHeader('Content-Type', 'application/json'); + res.end(json_string); + }).listen(port, done); + }); + + after(function(done){ + server.close(done); + }) + + describe('and parse option is not passed', function() { + + it('should return object', function(done){ + needle.get('localhost:' + port, function(err, response, body){ + should.ifError(err); + body.should.equal(false); + done(); + }) + }) + + }) + + describe('and parse option is true', function() { + + describe('and JSON is valid', function() { + + it('should return object', function(done){ + needle.get('localhost:' + port, { parse: true }, function(err, response, body){ + should.not.exist(err); + body.should.equal(false) + done(); + }) + }) + + }); + + describe('and response is empty', function() { + + var old_json_string; + + before(function() { + old_json_string = json_string; + json_string = ""; + }); + + after(function() { + json_string = old_json_string; + }); + + it('should return an empty string', function(done) { + needle.get('localhost:' + port, { parse: true }, function(err, resp) { + should.not.exist(err); + resp.body.should.equal(''); + done(); + }) + }) + + }) + + describe('and JSON is invalid', function() { + + var old_json_string; + + before(function() { + old_json_string = json_string; + json_string = "this is not going to work"; + }); + + after(function() { + json_string = old_json_string; + }); + + it('does not throw', function(done) { + (function(){ + needle.get('localhost:' + port, { parse: true }, done); + }).should.not.throw(); + }); + + it('does NOT return object', function(done) { + needle.get('localhost:' + port, { parse: true }, function(err, response, body) { + should.not.exist(err); + body.should.be.a.String; + body.toString().should.eql('this is not going to work'); + done(); + }) + }) + + }); + + }) + + describe('and parse option is false', function() { + + it('does NOT return object', function(done){ + needle.get('localhost:' + port, { parse: false }, function(err, response, body) 
{ + should.not.exist(err); + body.should.be.an.instanceof(Buffer) + body.toString().should.eql('false'); + done(); + }) + }) + + }) + + describe('and parse option is "xml"', function() { + + it('does NOT return object', function(done){ + needle.get('localhost:' + port, { parse: 'xml' }, function(err, response, body) { + should.not.exist(err); + body.should.be.an.instanceof(Buffer) + body.toString().should.eql('false'); + done(); + }) + }) + + }) + + + }); + + describe('when response is an invalid XML string', function(){ + + before(function(done){ + server = http.createServer(function(req, res) { + res.writeHeader(200, {'Content-Type': 'application/xml'}) + res.end("") + }).listen(port, done); + }); + + after(function(done){ + server.close(done); + }) + + describe('and parse_response is true', function(){ + + it('should return original string', function(done) { + needle.get('localhost:' + port, { parse_response: true }, function(err, response, body) { + should.not.exist(err); + body.should.eql('') + should.not.exist(body.name); + done(); + }) + }) + + it('should not have a .parser = xml property', function(done) { + needle.get('localhost:' + port, { parse_response: true }, function(err, resp) { + should.not.exist(err); + should.not.exist(resp.parser); + done(); + }) + }) + + }) + + describe('and parse response is false', function(){ + + it('should return valid object', function(done) { + needle.get('localhost:' + port, { parse_response: false }, function(err, response, body){ + should.not.exist(err); + body.toString().should.eql('') + done(); + }) + }) + + it('should not have a .parser property', function(done) { + needle.get('localhost:' + port, { parse_response: false }, function(err, resp) { + should.not.exist(err); + should.not.exist(resp.parser) + done(); + }) + }) + + }) + + }) + + describe('when response is a valid XML string', function(){ + + before(function(done) { + server = http.createServer(function(req, res) { + res.writeHeader(200, {'Content-Type': 'application/xml'}) + res.end("

<post><p>hello</p><p>world</p></post>
") + }).listen(port, done); + }); + + after(function(done) { + server.close(done); + }) + + describe('and parse_response is true', function(){ + + it('should return valid object', function(done) { + needle.get('localhost:' + port, { parse_response: true }, function(err, response, body) { + should.not.exist(err); + body.name.should.eql('post') + body.children[0].name.should.eql('p') + body.children[0].value.should.eql('hello') + + body.children[1].name.should.eql('p') + body.children[1].value.should.eql('world') + done(); + }) + }) + + it('should have a .parser = xml property', function(done) { + needle.get('localhost:' + port, { parse_response: true }, function(err, resp) { + should.not.exist(err); + resp.parser.should.eql('xml'); + done(); + }) + }) + + }) + + describe('and parse response is false', function(){ + + it('should return valid object', function(done) { + needle.get('localhost:' + port, { parse_response: false }, function(err, response, body){ + should.not.exist(err); + body.toString().should.eql('

<post><p>hello</p><p>world</p></post>
') + done(); + }) + }) + + it('should not have a .parser property', function(done) { + needle.get('localhost:' + port, { parse_response: false }, function(err, resp) { + should.not.exist(err); + should.not.exist(resp.parser) + done(); + }) + }) + + }) + + }) + + + describe('valid XML, using xml2js', function() { + + var parsers, origParser; + + before(function(done) { + var xml2js = require('xml2js') + parsers = require('../lib/parsers'); + origParser = parsers['application/xml']; + + var customParser = require('xml2js').parseString; + parsers.use('xml2js', ['application/xml'], function(buff, cb) { + var opts = { explicitRoot: true, explicitArray: false }; + customParser(buff, opts, cb); + }) + + server = http.createServer(function(req, res) { + res.writeHeader(200, {'Content-Type': 'application/xml'}) + res.end("

<post><p>hello</p><p>world</p></post>
") + }).listen(port, done); + }); + + after(function(done) { + parsers['application/xml'] = origParser; + server.close(done); + }) + + describe('and parse_response is true', function(){ + + it('should return valid object', function(done) { + needle.get('localhost:' + port, { parse_response: true }, function(err, response, body) { + should.not.exist(err); + body.should.eql({ post: { p: ['hello', 'world' ]}}) + done(); + }) + }) + + it('should have a .parser = xml property', function(done) { + needle.get('localhost:' + port, { parse_response: true }, function(err, resp) { + should.not.exist(err); + resp.parser.should.eql('xml2js'); + done(); + }) + }) + + }) + + describe('and parse response is false', function(){ + + it('should return valid object', function(done) { + needle.get('localhost:' + port, { parse_response: false }, function(err, response, body){ + should.not.exist(err); + body.toString().should.eql('

<post><p>hello</p><p>world</p></post>
') + done(); + }) + }) + + it('should not have a .parser property', function(done) { + needle.get('localhost:' + port, { parse_response: false }, function(err, resp) { + should.not.exist(err); + should.not.exist(resp.parser) + done(); + }) + }) + + }) + + }) + + +}) diff --git a/node_modules/needle/test/post_data_spec.js b/node_modules/needle/test/post_data_spec.js new file mode 100644 index 00000000..d9a28c6d --- /dev/null +++ b/node_modules/needle/test/post_data_spec.js @@ -0,0 +1,1021 @@ +var needle = require('..'), + http = require('http'), + should = require('should'), + sinon = require('sinon'), + stream = require('stream'), + helpers = require('./helpers'); + +var multiparts = ['----------------------NODENEEDLEHTTPCLIENT']; +multiparts.push(['Content-Disposition: form-data; name=\"foo\"']) +multiparts.push(['\r\nbar\r\n----------------------NODENEEDLEHTTPCLIENT--']) +// multiparts.push(['Content-Disposition: form-data; name=\"test\"']) +// multiparts.push(['\r\næµè¯\r\n----------------------NODENEEDLEHTTPCLIENT--']) +// multiparts.push(['\r\n' + Buffer.from('测试').toString() + '\r\n----------------------NODENEEDLEHTTPCLIENT--']) + + +describe('post data (e.g. request body)', function() { + + var stub, spy, server; + + before(function(done) { + server = helpers.server({ port: 4321 }, done); + }) + + after(function(done) { + server.close(done); + }) + + afterEach(function() { + if (stub) stub.restore(); + if (spy) spy.restore(); + }) + + function get(data, opts, cb) { + return needle.request('get', 'http://localhost:' + 4321, data, opts, cb) + } + + function post(data, opts, cb) { + return needle.request('post', 'http://localhost:' + 4321, data, opts, cb) + } + + function spystub_request() { + var http_req = http.request; + stub = sinon.stub(http, 'request', function(opts, cb) { + var req = http_req(opts, cb); + spy = sinon.spy(req, 'write'); + return req; + }) + } + + function check_request(method) { + stub.calledOnce.should.be.true; + stub.args[0][0]['headers']['host'].should.equal('localhost:4321'); + stub.args[0][0]['method'].should.equal(method); + } + + describe('with multipart: true', function() { + + describe('when null', function() { + + it('sends request (non multipart)', function(done) { + spystub_request(); + + post(null, { multipart: true }, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('doesnt set Content-Type header', function(done) { + post(null, { multipart: true }, function(err, resp) { + should.not.exist(resp.body.headers['content-type']); + done(); + }) + }) + + it('doesnt change default Accept header', function(done) { + post(null, { multipart: true }, function(err, resp) { + // resp.body contains 'header' and 'body', mirroring what we sent + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('doesnt write anything', function(done) { + spystub_request(); + + post(null, { multipart: true }, function(err, resp) { + spy.called.should.be.false; + resp.body.body.should.eql(''); + done(); + }) + }) + + }) + + describe('when string', function() { + + it('explodes', function() { + (function() { + post('foobar', { multipart: true }) + }).should.throw() + }) + + }) + + describe('when object', function() { + + describe('get request', function() { + + it('sends request', function(done) { + spystub_request(); + + get({ foo: 'bar', test: '测试' }, { multipart: true }, function(err, resp) { + check_request('get'); + done(); + }) + }) + + it('sets Content-Type header', function(done) { + post({ foo: 'bar', test: '测试' }, { 
multipart: true }, function(err, resp) { + resp.body.headers['content-type'].should.equal('multipart/form-data; boundary=--------------------NODENEEDLEHTTPCLIENT'); + done(); + }) + }) + + it('doesnt change default Accept header', function(done) { + post({ foo: 'bar', test: '测试' }, { multipart: true }, function(err, resp) { + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('writes string as buffer', function(done) { + spystub_request(); + + get({ foo: 'bar' }, { multipart: true }, function(err, resp) { + spy.called.should.be.true; + + spy.args[0][0].should.be.an.instanceof(String); + spy.args[0][0].toString().should.equal(multiparts.join('\r\n')); + resp.body.body.should.eql(multiparts.join('\r\n')); + done(); + }) + }) + + it('writes japanese chars correctly as binary', function(done) { + spystub_request(); + + get({ foo: 'bar', test: '测试' }, { multipart: true }, function(err, resp) { + spy.called.should.be.true; + + spy.args[0][0].should.be.an.instanceof(String); + Buffer.from(spy.args[0][0]).toString('hex').should.eql('2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d4e4f44454e4545444c4548545450434c49454e540d0a436f6e74656e742d446973706f736974696f6e3a20666f726d2d646174613b206e616d653d22666f6f220d0a0d0a6261720d0a2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d4e4f44454e4545444c4548545450434c49454e540d0a436f6e74656e742d446973706f736974696f6e3a20666f726d2d646174613b206e616d653d2274657374220d0a0d0ac3a6c2b5c28bc3a8c2afc2950d0a2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d4e4f44454e4545444c4548545450434c49454e542d2d') + done(); + }) + }) + + + }) + + describe('post request', function() { + + it('sends request', function(done) { + spystub_request(); + + post({ foo: 'bar', test: '测试' }, { multipart: true }, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('writes string as buffer', function(done) { + spystub_request(); + + post({ foo: 'bar' }, { multipart: true }, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].should.be.an.instanceof(String); + spy.args[0][0].toString().should.equal(multiparts.join('\r\n')); + resp.body.body.should.eql(multiparts.join('\r\n')); + done(); + }) + }) + + it('writes japanese chars correctly as binary', function(done) { + spystub_request(); + + post({ foo: 'bar', test: '测试' }, { multipart: true }, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].should.be.an.instanceof(String); + Buffer.from(spy.args[0][0]).toString('hex').should.eql('2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d4e4f44454e4545444c4548545450434c49454e540d0a436f6e74656e742d446973706f736974696f6e3a20666f726d2d646174613b206e616d653d22666f6f220d0a0d0a6261720d0a2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d4e4f44454e4545444c4548545450434c49454e540d0a436f6e74656e742d446973706f736974696f6e3a20666f726d2d646174613b206e616d653d2274657374220d0a0d0ac3a6c2b5c28bc3a8c2afc2950d0a2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d4e4f44454e4545444c4548545450434c49454e542d2d') + done(); + }) + }) + + }) + + }) + + describe('when stream', function() { + + var stream_for_multipart; + + before(function() { + stream_for_multipart = new stream.Readable(); + stream_for_multipart._read = function() { + this.push('foobar'); + this.push(null); + } + }) + + it('explodes', function() { + (function() { + post(stream_for_multipart, { multipart: true }) + }).should.throw() + }) + + }) + + }) + + describe('non multipart', function() { + + describe('when null', function() { + + describe('get request', function() { + + it('sends request', function(done) { + 
spystub_request(); + + get(null, {}, function(err, resp) { + check_request('get'); + done(); + }) + }) + + it('doesnt write anything', function(done) { + spystub_request(); + + get(null, {}, function(err, resp) { + spy.called.should.be.false; + resp.body.body.should.eql(''); + done(); + }) + }) + + }) + + describe('post request', function() { + + it('sends request', function(done) { + spystub_request(); + + post(null, {}, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('doesnt write anything', function(done) { + spystub_request(); + + post(null, {}, function(err, resp) { + spy.called.should.be.false; + resp.body.body.should.eql(''); + done(); + }) + }) + + }) + + }) + + describe('when string with no equal sign', function() { + + describe('get request', function() { + + it('explodes', function() { + (function() { + get('foobar', {}) + }).should.throw() + }) + + }) + + describe('post request', function() { + + it('sends request', function(done) { + spystub_request(); + + post('foobar', {}, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('writes string as buffer', function(done) { + spystub_request(); + + post('foobar', {}, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].should.be.an.instanceof(Buffer); + spy.args[0][0].toString().should.equal('foobar'); + resp.body.body.should.eql('foobar'); + done(); + }) + }) + + }) + + }) + + describe('when string WITH equal sign', function() { + + describe('get request', function() { + + describe('with json: false (default)', function() { + + it('sends request, adding data as querystring', function(done) { + spystub_request(); + + get('foo=bar', { json: false }, function(err, resp) { + check_request('get'); + stub.args[0][0]['path'].should.equal('/?foo=bar') + done(); + }) + }) + + it('doesnt set Content-Type header', function(done) { + get('foo=bar', { json: false }, function(err, resp) { + // resp.body contains 'header' and 'body', mirroring what we sent + should.not.exist(resp.body.headers['content-type']); + done(); + }) + }) + + it('doesnt change default Accept header', function(done) { + get('foo=bar', { json: false }, function(err, resp) { + // resp.body contains 'header' and 'body', mirroring what we sent + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('doesnt write anything', function(done) { + get('foo=bar', { json: false }, function(err, resp) { + spy.called.should.be.false; + resp.body.body.should.eql(''); + done(); + }) + }) + + }) + + describe('with json: true', function() { + + it('sends request, without setting a querystring', function(done) { + spystub_request(); + + get('foo=bar', { json: true }, function(err, resp) { + check_request('get'); + stub.args[0][0]['path'].should.equal('/') + done(); + }) + }) + + it('sets Content-Type header', function(done) { + get('foo=bar', { json: true }, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/json; charset=utf-8'); + done(); + }) + }) + + it('set Accept header to application/json', function(done) { + get('foo=bar', { json: true }, function(err, resp) { + resp.body.headers['accept'].should.equal('application/json'); + done(); + }) + }) + + it('writes raw string (assuming it already is JSON, so no JSON.stringify)', function(done) { + get('foo=bar', { json: true }, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].toString().should.eql('foo=bar') + resp.body.body.should.eql('foo=bar'); + done(); + }) + }) + + }) + + }) + + describe('post 
request', function() { + + describe('with json: false (default)', function() { + + it('sends request', function(done) { + spystub_request(); + + post('foo=bar', { json: false }, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('sets Content-Type header to www-form-urlencoded', function(done) { + post('foo=bar', { json: false }, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/x-www-form-urlencoded'); + done(); + }) + }) + + it('doesnt change default Accept header', function(done) { + post('foo=bar', { json: false }, function(err, resp) { + // resp.body contains 'header' and 'body', mirroring what we sent + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('writes as buffer', function(done) { + post('foo=bar', { json: false }, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].should.be.an.instanceof(Buffer); + spy.args[0][0].toString().should.equal('foo=bar'); + resp.body.body.should.eql('foo=bar'); + done(); + }) + }) + + }) + + describe('with json: true', function() { + + it('sends request', function(done) { + spystub_request(); + + post('foo=bar', { json: true }, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('sets Content-Type header', function(done) { + post('foo=bar', { json: true }, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/json; charset=utf-8'); + done(); + }) + }) + + it('set Accept header to application/json', function(done) { + post('foo=bar', { json: true }, function(err, resp) { + resp.body.headers['accept'].should.equal('application/json'); + done(); + }) + }) + + it('writes raw string (assuming it already is JSON, so no JSON.stringify)', function(done) { + post('foo=bar', { json: true }, function(err, resp) { + spy.called.should.be.true; + var json = JSON.stringify('foo=bar'); + spy.args[0][0].toString().should.eql('foo=bar') + resp.body.body.should.eql('foo=bar'); + done(); + }) + }) + + }) + + }) + + }) + + describe('when object', function() { + + describe('get request', function() { + + describe('with json: false (default)', function() { + + it('sends request, adding data as querystring', function(done) { + spystub_request(); + + get({ foo: 'bar', test: '测试' }, { json: false }, function(err, resp) { + check_request('get'); + stub.args[0][0]['path'].should.equal('/?foo=bar&test=%E6%B5%8B%E8%AF%95') + done(); + }) + }) + + it('doesnt set Content-Type header', function(done) { + get({ foo: 'bar', test: '测试' }, { json: false }, function(err, resp) { + // resp.body contains 'header' and 'body', mirroring what we sent + should.not.exist(resp.body.headers['content-type']); + done(); + }) + }) + + it('doesnt change default Accept header', function(done) { + get({ foo: 'bar', test: '测试' }, { json: false }, function(err, resp) { + // resp.body contains 'header' and 'body', mirroring what we sent + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('doesnt write anything', function(done) { + get({ foo: 'bar', test: '测试' }, { json: false }, function(err, resp) { + spy.called.should.be.false; + resp.body.body.should.eql(''); + done(); + }) + }) + + }) + + describe('with json: true', function() { + + it('sends request, without setting a querystring', function(done) { + spystub_request(); + + get({ foo: 'bar', test: '测试' }, { json: true }, function(err, resp) { + check_request('get'); + stub.args[0][0]['path'].should.equal('/') + done(); + }) + }) + + it('sets Content-Type header', 
function(done) { + get({ foo: 'bar', test: '测试' }, { json: true }, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/json; charset=utf-8'); + done(); + }) + }) + + it('set Accept header to application/json', function(done) { + get({ foo: 'bar', test: '测试' }, { json: true }, function(err, resp) { + resp.body.headers['accept'].should.equal('application/json'); + done(); + }) + }) + + it('writes JSON.stringify version of object', function(done) { + get({ foo: 'bar', test: '测试' }, { json: true }, function(err, resp) { + spy.called.should.be.true; + var json = JSON.stringify({ foo: 'bar', test: '测试' }) + spy.args[0][0].toString().should.eql(json) + resp.body.body.should.eql(json); + done(); + }) + }) + + }) + + }) + + describe('post request', function() { + + describe('with json: false (default)', function() { + + it('sends request', function(done) { + spystub_request(); + + post({ foo: 'bar', test: '测试' }, { json: false }, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('sets Content-Type header to www-form-urlencoded', function(done) { + post({ foo: 'bar', test: '测试' }, { json: false }, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/x-www-form-urlencoded'); + done(); + }) + }) + + it('doesnt change default Accept header', function(done) { + post({ foo: 'bar', test: '测试' }, { json: false }, function(err, resp) { + // resp.body contains 'header' and 'body', mirroring what we sent + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('writes as buffer', function(done) { + post({ foo: 'bar', test: '测试' }, { json: false }, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].should.be.an.instanceof(Buffer); + spy.args[0][0].toString().should.equal('foo=bar&test=%E6%B5%8B%E8%AF%95'); + resp.body.body.should.eql('foo=bar&test=%E6%B5%8B%E8%AF%95'); + done(); + }) + }) + + }) + + describe('with json: false and content_type = "application/json"', function() { + + var opts = { json: false, content_type: 'application/json' }; + + it('sends request', function(done) { + spystub_request(); + + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('sets Content-Type header to application/json', function(done) { + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/json'); + done(); + }) + }) + + it('doesnt change default Accept header', function(done) { + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + // resp.body contains 'header' and 'body', mirroring what we sent + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('writes as buffer', function(done) { + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].constructor.name.should.eql('Buffer'); + spy.args[0][0].toString().should.equal('foo=bar&test=%E6%B5%8B%E8%AF%95'); + resp.body.body.should.eql('foo=bar&test=%E6%B5%8B%E8%AF%95'); + done(); + }) + }) + + }) + + describe('with json: undefined but content-type = application/json', function() { + + var opts = { headers: { 'content-type': 'application/json' } }; + + it('sends request', function(done) { + spystub_request(); + + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('doesnt change Content-Type header', function(done) { + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + 
resp.body.headers['content-type'].should.equal('application/json'); + done(); + }) + }) + + it('leaves default Accept header', function(done) { + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('writes JSON.stringified object', function(done) { + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + spy.called.should.be.true; + var json = JSON.stringify({ foo: 'bar', test: '测试' }) + spy.args[0][0].toString().should.eql(json) + resp.body.body.should.eql(json); + done(); + }) + }) + }) + + describe('with json: true', function() { + + it('sends request', function(done) { + spystub_request(); + + post({ foo: 'bar', test: '测试' }, { json: true }, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('sets Content-Type header', function(done) { + post({ foo: 'bar', test: '测试' }, { json: true }, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/json; charset=utf-8'); + done(); + }) + }) + + it('set Accept header to application/json', function(done) { + post({ foo: 'bar', test: '测试' }, { json: true }, function(err, resp) { + resp.body.headers['accept'].should.equal('application/json'); + done(); + }) + }) + + it('writes JSON.stringified object', function(done) { + post({ foo: 'bar', test: '测试' }, { json: true }, function(err, resp) { + spy.called.should.be.true; + var json = JSON.stringify({ foo: 'bar', test: '测试' }) + spy.args[0][0].toString().should.eql(json) + resp.body.body.should.eql(json); + done(); + }) + }) + + }) + + + describe('with json: true and content_type: */* (passed, not default)', function() { + + var opts = { json: true, accept: '*/*' }; + + it('sends request', function(done) { + spystub_request(); + + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('sets Content-Type header to application/json', function(done) { + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/json; charset=utf-8'); + done(); + }) + }) + + it('respects Accept header set by user', function(done) { + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('writes JSON.stringified object', function(done) { + post({ foo: 'bar', test: '测试' }, opts, function(err, resp) { + spy.called.should.be.true; + var json = JSON.stringify({ foo: 'bar', test: '测试' }) + spy.args[0][0].toString().should.eql(json) + resp.body.body.should.eql(json); + done(); + }) + }) + + }) + + }) + + }) + + describe('when buffer', function() { + + describe('get request', function() { + + describe('with json: false (default)', function() { + + it('sends request', function(done) { + spystub_request(); + + get(Buffer.from('foobar'), { json: false }, function(err, resp) { + check_request('get'); + done(); + }) + }) + + it('sets Content-Type header', function(done) { + get(Buffer.from('foobar'), { json: false }, function(err, resp) { + // should.not.exist(resp.body.headers['content-type']); + resp.body.headers['content-type'].should.equal('application/x-www-form-urlencoded'); + + done(); + }) + }) + + it('doesnt change default Accept header', function(done) { + get(Buffer.from('foobar'), { json: false }, function(err, resp) { + // resp.body contains 'header' and 'body', mirroring what we sent + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('writes as 
buffer', function(done) { + get(Buffer.from('foobar'), { json: false }, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].should.be.an.instanceof(Buffer); + spy.args[0][0].toString().should.equal('foobar'); + resp.body.body.should.eql('foobar'); + done(); + }) + }) + + }) + + describe('with json: true', function() { + + it('sends request, without setting a querystring', function(done) { + spystub_request(); + + get(Buffer.from('foobar'), { json: true }, function(err, resp) { + check_request('get'); + done(); + }) + }) + + it('sets Content-Type header', function(done) { + get(Buffer.from('foobar'), { json: true }, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/json; charset=utf-8'); + done(); + }) + }) + + it('set Accept header to application/json', function(done) { + get(Buffer.from('foobar'), { json: true }, function(err, resp) { + resp.body.headers['accept'].should.equal('application/json'); + done(); + }) + }) + + it('writes JSON.stringify version of object', function(done) { + get(Buffer.from('foobar'), { json: true }, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].toString().should.eql('foobar') + resp.body.body.should.eql('foobar'); + done(); + }) + }) + + }) + + }) + + describe('post request', function() { + + describe('with json: false (default)', function() { + + it('sends request', function(done) { + spystub_request(); + + post(Buffer.from('foobar'), { json: false }, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('sets Content-Type header to www-form-urlencoded', function(done) { + post(Buffer.from('foobar'), { json: false }, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/x-www-form-urlencoded'); + done(); + }) + }) + + it('doesnt change default Accept header', function(done) { + post(Buffer.from('foobar'), { json: false }, function(err, resp) { + // resp.body contains 'header' and 'body', mirroring what we sent + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('writes as buffer', function(done) { + post(Buffer.from('foobar'), { json: false }, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].should.be.an.instanceof(Buffer); + spy.args[0][0].toString().should.equal('foobar'); + resp.body.body.should.eql('foobar'); + done(); + }) + }) + + }) + + describe('with json: true', function() { + + it('sends request', function(done) { + spystub_request(); + + post(Buffer.from('foobar'), { json: true }, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('sets Content-Type header', function(done) { + post(Buffer.from('foobar'), { json: true }, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/json; charset=utf-8'); + done(); + }) + }) + + it('set Accept header to application/json', function(done) { + post(Buffer.from('foobar'), { json: true }, function(err, resp) { + resp.body.headers['accept'].should.equal('application/json'); + done(); + }) + }) + + it('passes raw buffer (assuming its a JSON string beneath)', function(done) { + post(Buffer.from('foobar'), { json: true }, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].toString().should.eql('foobar') + resp.body.body.should.eql('foobar'); + done(); + }) + }) + + }) + + }) + + }) + + describe('when stream', function() { + + var input_stream; + + beforeEach(function() { + input_stream = new stream.Readable(); + input_stream._read = function() { + this.push('foobar'); + 
this.push(null); + } + }) + + describe('get request', function() { + + it('explodes', function() { + (function() { + get(input_stream, {}) + }).should.throw() + }) + + }); + + describe('post request', function() { + + describe('with json: false (default)', function() { + + it('sends request', function(done) { + spystub_request(); + + post(input_stream, { json: false }, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('sets Content-Type header to www-form-urlencoded', function(done) { + post(input_stream, { json: false }, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/x-www-form-urlencoded'); + done(); + }) + }) + + it('doesnt change default Accept header', function(done) { + post(input_stream, { json: false }, function(err, resp) { + // resp.body contains 'header' and 'body', mirroring what we sent + resp.body.headers['accept'].should.equal('*/*'); + done(); + }) + }) + + it('writes as buffer', function(done) { + post(input_stream, { json: false }, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].should.be.an.instanceof(Buffer); + spy.args[0][0].toString().should.equal('foobar'); + resp.body.body.should.eql('foobar'); + done(); + }) + }) + + }) + + describe('with json: true', function() { + + it('sends request', function(done) { + spystub_request(); + + post(input_stream, { json: true }, function(err, resp) { + check_request('post'); + done(); + }) + }) + + it('sets Content-Type header', function(done) { + post(input_stream, { json: true }, function(err, resp) { + resp.body.headers['content-type'].should.equal('application/json; charset=utf-8'); + done(); + }) + }) + + it('set Accept header to application/json', function(done) { + post(input_stream, { json: true }, function(err, resp) { + resp.body.headers['accept'].should.equal('application/json'); + done(); + }) + }) + + it('writes JSON.stringified object', function(done) { + post(input_stream, { json: true }, function(err, resp) { + spy.called.should.be.true; + spy.args[0][0].toString().should.eql('foobar') + resp.body.body.should.eql('foobar'); + done(); + }) + }) + + }) + + }) + + }) + + }) + +}) diff --git a/node_modules/needle/test/proxy_spec.js b/node_modules/needle/test/proxy_spec.js new file mode 100644 index 00000000..9526e7f7 --- /dev/null +++ b/node_modules/needle/test/proxy_spec.js @@ -0,0 +1,202 @@ +var helpers = require('./helpers'), + should = require('should'), + sinon = require('sinon'), + http = require('http'), + needle = require('./../'); + +var port = 7707; +var url = 'localhost:' + port; +var nonexisting_host = 'awepfokawepofawe.com'; + +describe('proxy option', function() { + + var spy, opts; + + function send_request(opts, done) { + if (spy) spy.restore(); + spy = sinon.spy(http, 'request'); + needle.get(url, opts, done); + } + + ////////////////////// + // proxy opts helpers + + function not_proxied(done) { + return function(err, resp) { + var path = spy.args[0][0].path; + path.should.eql('/'); // not the full original URI + spy.restore(); + done(); + } + } + + function proxied(host, port, done) { + return function(err, resp) { + var path = spy.args[0][0].path; + path.should.eql('http://' + url); // the full original URI + + var http_host = spy.args[0][0].host; + if (http_host) http_host.should.eql(host); + + var http_port = spy.args[0][0].port; + if (http_port) http_port.should.eql(port); + + spy.restore(); + done(); + } + } + + ////////////////////// + // auth helpers + + function get_auth(header) { + var token = 
header.split(/\s+/).pop(); + return token && Buffer.from(token, 'base64').toString().split(':'); + } + + function no_proxy_auth(done) { + return function(err, resp) { + var headers = spy.args[0][0].headers; + Object.keys(headers).should.not.containEql('proxy-authorization'); + done(); + } + } + + function header_set(name, user, pass, done) { + return function(err, resp) { + var headers = spy.args[0][0].headers; + var auth = get_auth(headers[name]); + auth[0].should.eql(user); + auth[1].should.eql(pass); + done(); + } + } + + function proxy_auth_set(user, pass, done) { + return header_set('proxy-authorization', user, pass, done); + } + + function basic_auth_set(user, pass, done) { + return header_set('authorization', user, pass, done); + } + + after(function() { + spy.restore(); + }) + + describe('when null proxy is passed', function() { + + it('does not proxy', function(done) { + send_request({ proxy: null }, not_proxied(done)) + }) + + describe('but defaults has been set', function() { + + before(function() { + needle.defaults({ proxy: 'foobar' }); + }) + + after(function() { + needle.defaults({ proxy: null }); + }) + + it('tries to proxy anyway', function(done) { + send_request({}, proxied('foobar', 80, done)) + }) + + }) + + }) + + describe('when weird string is passed', function() { + + it('tries to proxy anyway', function(done) { + send_request({ proxy: 'alfalfa' }, proxied('alfalfa', 80, done)) + }) + }) + + describe('when valid url is passed', function() { + + it('proxies request', function(done) { + send_request({ proxy: nonexisting_host + ':123/done' }, proxied(nonexisting_host, '123', done)) + }) + + it('does not set a Proxy-Authorization header', function(done) { + send_request({ proxy: nonexisting_host + ':123/done' }, no_proxy_auth(done)); + }) + + describe('and proxy url contains user:pass', function() { + + before(function() { + opts = { + proxy: 'http://mj:x@' + nonexisting_host + ':123/done' + } + }) + + it('proxies request', function(done) { + send_request(opts, proxied(nonexisting_host, '123', done)) + }) + + it('sets Proxy-Authorization header', function(done) { + send_request(opts, proxy_auth_set('mj', 'x', done)); + }) + + }) + + describe('and a proxy_user is passed', function() { + + before(function() { + opts = { + proxy: nonexisting_host + ':123', + proxy_user: 'someone', + proxy_pass: 'else' + } + }) + + it('proxies request', function(done) { + send_request(opts, proxied(nonexisting_host, '123', done)) + }) + + it('sets Proxy-Authorization header', function(done) { + send_request(opts, proxy_auth_set('someone', 'else', done)); + }) + + describe('and url also contains user:pass', function() { + + it('url user:pass wins', function(done) { + var opts = { + proxy: 'http://xxx:yyy@' + nonexisting_host + ':123', + proxy_user: 'someone', + proxy_pass: 'else' + } + + send_request(opts, proxy_auth_set('xxx', 'yyy', done)); + }) + + }) + + describe('and options.username is also present', function() { + + before(function() { + opts = { proxy_user: 'foobar', username: 'someone' }; + }) + + it('a separate Authorization header is set', function(done) { + var opts = { + proxy: nonexisting_host + ':123', + proxy_user: 'someone', + proxy_pass: 'else', + username: 'test', + password: 'X' + } + + send_request(opts, basic_auth_set('test', 'X', done)); + }) + + }) + + }) + + }) + +}) diff --git a/node_modules/needle/test/querystring_spec.js b/node_modules/needle/test/querystring_spec.js new file mode 100644 index 00000000..34c1748c --- /dev/null +++ 
b/node_modules/needle/test/querystring_spec.js @@ -0,0 +1,128 @@ +var should = require('should'), + stringify = require('../lib/querystring').build; + +describe('stringify', function() { + + describe('with null', function() { + + it('throws', function() { + (function() { + var res = stringify(null); + }).should.throw(); + }) + + }) + + describe('with a number', function() { + + it('throws', function() { + (function() { + var res = stringify(100); + }).should.throw(); + }) + + }) + + describe('with a string', function() { + + describe('that is empty', function() { + + it('throws', function() { + (function() { + var res = stringify(''); + }).should.throw(); + }) + + }) + + describe('that doesnt contain an equal sign', function() { + + it('throws', function() { + (function() { + var res = stringify('boomshagalaga'); + }).should.throw(); + }) + + }) + + describe('that contains an equal sign', function() { + + it('works', function() { + var res = stringify('hello=123'); + res.should.eql('hello=123'); + }) + + }) + + }) + + describe('with an array', function() { + + describe('with key val objects', function() { + + it('works', function() { + var res = stringify([ {foo: 'bar'} ]); + res.should.eql('foo=bar'); + }) + + }) + + describe('where all elements are strings with an equal sign', function() { + + it('works', function() { + var res = stringify([ 'bar=123', 'quux=' ]); + res.should.eql('bar=123&quux='); + }) + + }) + + describe('with random words', function() { + + it('throws', function() { + (function() { + var res = stringify(['hello', 'there']); + }).should.throw(); + }) + + }) + + describe('with integers', function() { + + it('throws', function() { + (function() { + var res = stringify([123, 432]); + }).should.throw(); + }) + + }) + + }) + + describe('with an object', function() { + + it('works', function() { + var res = stringify({ test: 100 }); + res.should.eql('test=100'); + }) + + describe('with object where val is an array', function() { + + it('works', function() { + var res = stringify({ foo: ['bar', 'baz'] }); + res.should.eql('foo[]=bar&foo[]=baz'); + }) + + }) + + describe('with object where val is an array of key val objects', function() { + + it('works', function() { + var res = stringify({ foo: [{'1': 'bar'}, {'2': 'baz'}] }); + res.should.eql('foo[][1]=bar&foo[][2]=baz'); + }) + + }) + + }) + +}) diff --git a/node_modules/needle/test/redirect_spec.js b/node_modules/needle/test/redirect_spec.js new file mode 100644 index 00000000..9399c987 --- /dev/null +++ b/node_modules/needle/test/redirect_spec.js @@ -0,0 +1,392 @@ +var helpers = require('./helpers'), + should = require('should'), + sinon = require('sinon'), + needle = require('./../'); + +var ports = { + http : 8888, + https : 9999 +} + +var protocols = { + http : require('http'), + https : require('https') +} + +var code = 301; +var location; // var to set the response location + +function response_code() { + return code; +} + +function response_headers() { + return { 'Content-Type': 'text/plain', 'Location': location } +} + +describe('redirects', function() { + + var spies = {}, + servers = {}; + + var current_protocol; + var hostname = require('os').hostname(); + + // open two servers, one that responds to a redirect + before(function(done) { + + var conf = { + port : ports.http, + code : response_code, + headers : response_headers + } + + servers.http = helpers.server(conf, function() { + conf.port = ports.https; + conf.protocol = 'https'; + servers.https = helpers.server(conf, done); + }); + }) + + 
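+  // helpers.server presumably serves every request using the `code` and
+  // `headers` functions above, i.e. a 301 with a Location header read from
+  // `location`, which each describe block below overwrites before requesting.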
after(function(done) { + servers.http.close(function() { + servers.https.close(done); + }); + }) + + var prots = {'http': 'https'}; + Object.keys(prots).forEach(function(protocol) { + + current_protocol = protocol; + var other_protocol = protocol == 'http' ? 'https' : 'http'; + + var opts, // each test will modify this + host = '127.0.0.1', + url = protocol + '://' + host + ':' + ports[protocol] + '/hello'; + + function send_request(opts, cb) { + opts.rejectUnauthorized = false; + // console.log(' -- sending request ' + url + ' -- redirect to ' + location); + needle.post(url, { foo: 'bar' }, opts, cb); + } + + function not_followed(done) { + send_request(opts, function(err, resp) { + resp.statusCode.should.eql(301); + if (current_protocol == 'http') { + spies.http.callCount.should.eql(1); // only original request + spies.https.callCount.should.eql(0); + } else { + spies.http.callCount.should.eql(0); + spies.https.callCount.should.eql(1); // only original request + } + done(); + }) + } + + function followed_same_protocol(done) { + send_request(opts, function(err, resp) { + // the original request plus the redirect one + spies[current_protocol].callCount.should.eql(2); + done(); + }) + + } + + function followed_other_protocol(done) { + send_request(opts, function(err, resp) { + // on new node versions, https.request calls http.request internally, + // so we need to amount for that additional call. + + var http_calls = protocols.http.Agent.defaultMaxSockets == Infinity ? 2 : 1; + + spies.http.callCount.should.eql(http_calls); // the one(s) from http.request + spies.https.callCount.should.eql(1); // the one from https.request (redirect) + done(); + }) + } + + // set a spy on [protocol].request + // so we can see how many times a request was made + before(function() { + spies.http = sinon.spy(protocols.http, 'request'); + spies.https = sinon.spy(protocols.https, 'request'); + }) + + // and make sure it is restored after each test + afterEach(function() { + spies.http.reset(); + spies.https.reset(); + }) + + after(function() { + spies.http.restore(); + spies.https.restore(); + }) + + describe('when overriding defaults', function() { + + before(function() { + needle.defaults({ follow_max: 10 }); + opts = {}; + }) + + after(function() { + // reset values to previous + needle.defaults({ follow_max: 0 }); + }) + + describe('and redirected to the same path on same host and protocol', function() { + before(function() { + location = url; + }) + it('does not follow redirect', not_followed); + }) + + describe('and redirected to the same path on same host and different protocol', function() { + before(function() { + location = url.replace(protocol, other_protocol).replace(ports[protocol], ports[other_protocol]); + }) + + it('follows redirect', followed_other_protocol); + }) + + describe('and redirected to a different path on same host, same protocol', function() { + before(function() { + location = url.replace('/hello', '/goodbye'); + }) + it('follows redirect', followed_same_protocol); + }) + + describe('and redirected to a different path on same host, different protocol', function() { + before(function() { + location = url.replace('/hello', '/goodbye').replace(protocol, other_protocol).replace(ports[protocol], ports[other_protocol]); + }) + it('follows redirect', followed_other_protocol); + }) + + describe('and redirected to same path on another host, same protocol', function() { + before(function() { + location = url.replace(host, hostname); + }) + it('follows redirect', followed_same_protocol); + }) + 
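+      // "another host" is simulated by swapping 127.0.0.1 for os.hostname(),
+      // so the redirect still points at the same local test server.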
+ describe('and redirected to same path on another host, different protocol', function() { + before(function() { + location = url.replace(host, hostname).replace(protocol, other_protocol).replace(ports[protocol], ports[other_protocol]); + }) + it('follows redirect', followed_other_protocol); + }) + + }) + + // false and null have the same result + var values = [false, null]; + values.forEach(function(value) { + + describe('when follow is ' + value, function() { + + before(function() { + opts = { follow: value }; + }) + + + + describe('and redirected to the same path on same host and protocol', function() { + before(function() { + location = url; + }) + + it('throws an error', function() { + (function() { + send_request(opts, function() { }); + }).should.throw; + }) + + }) + + }) + + }) + + describe('when follow is true', function() { + + before(function() { + opts = { follow: true }; + }) + + describe('and redirected to the same path on same host and protocol', function() { + before(function() { location = url }) + + it('throws an error', function() { + (function() { + send_request(opts, function() { }); + }).should.throw; + }) + + }) + + }) + + describe('when follow is > 0', function() { + + before(function() { + needle.defaults({ follow: 10 }); + }) + + after(function() { + needle.defaults({ follow: 0 }); + }) + + describe('when keep_method is false', function() { + + before(function() { + opts = { follow_keep_method: false }; + }) + + // defaults to follow host and protocol + describe('and redirected to the same path on same host and different protocol', function() { + + before(function() { + location = url.replace(protocol, other_protocol); + }) + + it('follows redirect', followed_other_protocol); + + it('sends a GET request with no data', function(done) { + send_request(opts, function(err, resp) { + spies.http.args[0][0].method.should.eql('GET'); + // spy.args[0][3].should.eql(null); + done(); + }) + }) + + }) + + }) + + describe('and set_referer is true', function() { + + before(function() { + opts = { follow_set_referer: true }; + }) + + // defaults to follow host and protocol + describe('and redirected to the same path on same host and different protocol', function() { + + before(function() { + location = url.replace(protocol, other_protocol); + }) + + it('follows redirect', followed_other_protocol); + + it('sets Referer header when following redirect', function(done) { + send_request(opts, function(err, resp) { + spies.http.args[0][0].headers['referer'].should.eql("http://" + host + ":8888/hello"); + // spies.http.args[0][3].should.eql({ foo: 'bar'}); + done(); + }) + }) + + }) + + }) + + describe('and keep_method is true', function() { + + before(function() { + opts = { follow_keep_method: true }; + }) + + // defaults to follow host and protocol + describe('and redirected to the same path on same host and different protocol', function() { + + before(function() { + location = url.replace(protocol, other_protocol); + }) + + it('follows redirect', followed_other_protocol); + + it('sends a POST request with the original data', function(done) { + send_request(opts, function(err, resp) { + spies.http.args[0][0].method.should.eql('post'); + // spies.http.args[0][3].should.eql({ foo: 'bar'}); + done(); + }) + }) + + }) + + }) + + describe('and if_same_host is false', function() { + + before(function() { + opts = { follow_if_same_host: false }; + }) + + // by default it will follow other protocols + describe('and redirected to same path on another domain, same protocol', function() { + 
before(function() { + location = url.replace(host, hostname); + }) + it('follows redirect', followed_same_protocol); + }) + + }) + + describe('and if_same_host is true', function() { + + before(function() { + opts = { follow_if_same_host: true }; + }) + + // by default it will follow other protocols + describe('and redirected to same path on another domain, same protocol', function() { + before(function() { + location = url.replace(host, hostname); + }) + + it('does not follow redirect', not_followed); + }) + + }) + + describe('and if_same_protocol is false', function() { + + before(function() { + opts = { follow_if_same_protocol: false }; + }) + + // by default it will follow other hosts + describe('and redirected to same path on another domain, different protocol', function() { + before(function() { + location = url.replace(host, hostname).replace(protocol, other_protocol).replace(ports[protocol], ports[other_protocol]); + }) + it('follows redirect', followed_other_protocol); + }) + + }) + + describe('and if_same_protocol is true', function() { + + before(function() { + opts = { follow_if_same_protocol: true }; + }) + + // by default it will follow other hosts + describe('and redirected to same path on another domain, different protocol', function() { + before(function() { + location = url.replace(host, hostname).replace(protocol, other_protocol).replace(ports[protocol], ports[other_protocol]); + }) + it('does not follow redirect', not_followed); + }) + + }) + + }) + + }) + +}); diff --git a/node_modules/needle/test/redirect_with_timeout.js b/node_modules/needle/test/redirect_with_timeout.js new file mode 100644 index 00000000..7e8e02e6 --- /dev/null +++ b/node_modules/needle/test/redirect_with_timeout.js @@ -0,0 +1,45 @@ +var should = require('should') +var needle = require('./../') + +describe('follow redirects when read_timeout is set', function () { + + it('clear timeout before following redirect', function (done) { + var opts = { + open_timeout: 1000, + read_timeout: 3000, + follow: 5, + user_agent: 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36' + } + + var timedOut = 0 + var redirects = 0 + + var timer = setTimeout(function () { + var hasRedirects = redirects > 0 + hasRedirects.should.equal(true) + done() + }, opts.read_timeout || 3000) + + var resp = needle.get('http://google.com/', opts, function (err, resp, body) { + var noErr = err === null + var hasBody = body.length > 0 + noErr.should.equal(true); + hasBody.should.equal(true); + }); + + resp.on('redirect', function (location) { + redirects++ + // console.info(' Redirected to ', location) + }) + + resp.on('timeout', function (type) { + timedOut++ + timedOut.should.equal(0) + // console.error(' ', type, 'timeout') + clearTimeout(timer) + done() + }) + + }).timeout(30000) + +}) \ No newline at end of file diff --git a/node_modules/needle/test/request_stream_spec.js b/node_modules/needle/test/request_stream_spec.js new file mode 100644 index 00000000..1f02c8b5 --- /dev/null +++ b/node_modules/needle/test/request_stream_spec.js @@ -0,0 +1,202 @@ +var fs = require('fs'), + needle = require('..'), + stream = require('stream'), + http = require('http'), + should = require('should'), + sinon = require('sinon'); + +var port = 2233; + +describe('request stream length', function() { + + var server, writable; + + function createServer() { + return http.createServer(function(req, res) { + + req.on('data', function(chunk) { + // console.log(chunk.length); + }) + + 
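+      // the 'end' handler below echoes the received request headers back as
+      // JSON, so the specs can assert on Content-Length / Transfer-Encoding.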
req.on('end', function() { + res.writeHeader(200, { 'Content-Type': 'application/json'}) + res.end(JSON.stringify({ headers: req.headers })) + }) + + }) + } + + before(function(done) { + server = createServer(); + server.listen(port, done) + }) + + beforeEach(function() { + writable = new stream.Readable(); + writable._read = function() { + this.push('hello world'); + this.push(null); + } + }) + + after(function(done) { + server.close(done) + }) + + function send_request(opts, cb) { + needle.post('http://localhost:' + port, writable, opts, function(err, resp) { + cb(err, resp) + }) + } + + describe('no stream_length set', function() { + + it('doesnt set Content-Length header', function(done) { + send_request({}, function(err, resp) { + should.not.exist(resp.body.headers['content-length']); + done() + }) + }) + + it('doesnt work if Transfer-Encoding is set to a blank string', function(done) { + send_request({ headers: { 'Transfer-Encoding': '' }}, function(err, resp) { + err.code.should.eql('ECONNRESET'); + done() + }) + }) + + it('works if Transfer-Encoding is not set', function(done) { + send_request({}, function(err, resp) { + should.not.exist(err); + resp.statusCode.should.eql(200); + done() + }) + }) + + }) + + describe('stream_length set to invalid value', function() { + + it('sets Content-Length header to that value', function(done) { + send_request({ stream_length: 5 }, function(err, resp) { + should.exist(err); + err.code.should.eql('ECONNRESET'); + done() + }) + }) + + it('doesnt work if Transfer-Encoding is set to a blank string', function(done) { + send_request({ stream_length: 5, headers: { 'Transfer-Encoding': '' }}, function(err, resp) { + err.code.should.eql('ECONNRESET'); + done() + }) + }) + + it('doesnt work if Transfer-Encoding is not set', function(done) { + send_request({ stream_length: 5 }, function(err, resp) { + err.code.should.eql('ECONNRESET'); + done() + }) + }) + + }) + + describe('stream_length is set to valid value', function() { + + it('sets Content-Length header to that value', function(done) { + send_request({ stream_length: 11 }, function(err, resp) { + resp.body.headers['content-length'].should.eql('11'); + done() + }) + }) + + it('works if Transfer-Encoding is set to a blank string', function(done) { + send_request({ stream_length: 11, headers: { 'Transfer-Encoding': '' }}, function(err, resp) { + should.not.exist(err); + resp.statusCode.should.eql(200); + done() + }) + }) + + it('works if Transfer-Encoding is not set', function(done) { + send_request({ stream_length: 11 }, function(err, resp) { + should.not.exist(err); + resp.statusCode.should.eql(200); + done() + }) + }) + + }) + + + describe('stream_length set to 0', function() { + + describe('stream with path', function() { + + var stub; + + beforeEach(function() { + writable.path = '/foo/bar'; + stub = sinon.stub(fs, 'stat', function(path, cb) { + cb(null, { size: 11 }) + }) + }) + + afterEach(function() { + stub.restore(); + }) + + it('sets Content-Length header to streams length', function(done) { + send_request({ stream_length: 0 }, function(err, resp) { + resp.body.headers['content-length'].should.eql('11'); + done() + }) + }) + + it('works if Transfer-Encoding is set to a blank string', function(done) { + send_request({ stream_length: 0, headers: { 'Transfer-Encoding': '' }}, function(err, resp) { + should.not.exist(err); + resp.statusCode.should.eql(200); + done() + }) + }) + + it('works if Transfer-Encoding is not set', function(done) { + send_request({ stream_length: 0 }, function(err, resp) 
{ + should.not.exist(err); + resp.statusCode.should.eql(200); + done() + }) + }) + + }) + + describe('stream without path', function() { + it('does not set Content-Length header', function(done) { + send_request({ stream_length: 0 }, function(err, resp) { + should.not.exist(resp.body.headers['content-length']); + done() + }) + }) + + it('doesnt work if Transfer-Encoding is set to a blank string', function(done) { + send_request({ stream_length: 0, headers: { 'Transfer-Encoding': '' }}, function(err, resp) { + err.code.should.eql('ECONNRESET'); + done() + }) + }) + + it('works if Transfer-Encoding is not set', function(done) { + send_request({ stream_length: 0 }, function(err, resp) { + should.not.exist(err); + resp.statusCode.should.eql(200); + done() + }) + }) + + }) + + + }) + +}) diff --git a/node_modules/needle/test/response_stream_spec.js b/node_modules/needle/test/response_stream_spec.js new file mode 100644 index 00000000..ef20e388 --- /dev/null +++ b/node_modules/needle/test/response_stream_spec.js @@ -0,0 +1,139 @@ +var should = require('should'), + needle = require('./../'), + http = require('http'), + stream = require('stream'), + fs = require('fs'), + port = 11111, + server; + +describe('response streams', function() { + + describe('when the server sends back json', function(){ + + before(function() { + server = http.createServer(function(req, res) { + res.setHeader('Content-Type', 'application/json') + res.end('{"foo":"bar"}') + }).listen(port); + }); + + after(function() { + server.close(); + }) + + describe('and the client uses streams', function(){ + + it('should create a proper streams2 stream', function(done) { + var stream = needle.get('localhost:' + port) + + // newer node versions set this to null instead of false + var bool = !!stream._readableState.flowing; + should.equal(false, bool); + + var readableCalled = false; + stream.on('readable', function() { + readableCalled = true; + }) + + stream.on('done', function() { + readableCalled.should.be.true; + done(); + }); + + stream.resume() + + }) + + it('emits a single data item which is our JSON object', function(done) { + var stream = needle.get('localhost:' + port) + + var chunks = []; + stream.on('readable', function () { + while (chunk = this.read()) { + chunk.should.be.an.Object; + chunks.push(chunk); + } + }) + + stream.on('done', function () { + chunks.should.have.length(1) + chunks[0].should.have.property('foo', 'bar'); + done(); + }); + }) + + it('emits a raw buffer if we do not want to parse JSON', function(done) { + var stream = needle.get('localhost:' + port, { parse: false }) + + var chunks = []; + stream.on('readable', function () { + while (chunk = this.read()) { + Buffer.isBuffer(chunk).should.be.true; + chunks.push(chunk); + } + }) + + stream.on('done', function() { + var body = Buffer.concat(chunks).toString(); + body.should.equal('{"foo":"bar"}') + done(); + }); + }) + + }) + }) + + describe('when the server sends back what was posted to it', function () { + var file = 'asdf.txt'; + + before(function(done){ + server = http.createServer(function(req, res) { + res.setHeader('Content-Type', 'application/octet') + req.pipe(res); + }).listen(port); + + fs.writeFile(file, 'contents of stream', done); + }); + + after(function(done){ + server.close(); + fs.unlink(file, done); + }) + + it('can PUT a stream', function (done) { + var stream = needle.put('localhost:' + port, fs.createReadStream(file), { stream: true }); + + var chunks = []; + stream.on('readable', function () { + while (chunk = this.read()) { + 
Buffer.isBuffer(chunk).should.be.true; + chunks.push(chunk); + } + }) + + stream.on('end', function () { + var body = Buffer.concat(chunks).toString(); + body.should.equal('contents of stream') + done(); + }); + }); + + it('can PATCH a stream', function (done) { + var stream = needle.patch('localhost:' + port, fs.createReadStream(file), { stream: true }); + + var chunks = []; + stream.on('readable', function () { + while (chunk = this.read()) { + Buffer.isBuffer(chunk).should.be.true; + chunks.push(chunk); + } + }) + + stream.on('end', function () { + var body = Buffer.concat(chunks).toString(); + body.should.equal('contents of stream') + done(); + }); + }); + }) +}) diff --git a/node_modules/needle/test/socket_pool_spec.js b/node_modules/needle/test/socket_pool_spec.js new file mode 100644 index 00000000..58ffa14f --- /dev/null +++ b/node_modules/needle/test/socket_pool_spec.js @@ -0,0 +1,66 @@ +var needle = require('../'), + should = require('should'), + http = require('http'); + +var server, port = 11112; + +describe('socket reuse', function() { + + before(function() { + server = http.createServer(function(req, res) { + res.setHeader('Content-Type', 'application/json'); + setTimeout(function() { + res.end('{"foo":"bar"}'); + }, 50); + }).listen(port); + }); + + after(function() { + server.close(); + }); + + describe('when sockets are reused', function() { + + var httpAgent = new http.Agent({ + keepAlive : true, + maxSockets : 1 + }); + + it('does not duplicate listeners on .end', function(done) { + + var last_error; + var count = 10; + + function completed(err) { + --count || done(last_error); + } + + function send() { + needle.get('localhost:' + port, { agent: httpAgent }, function(err, resp) { + if (err) + throw new Error("Unexpected error: " + err); + + // lets go through all sockets and inspect all socket objects + for (hostTarget in httpAgent.sockets) { + httpAgent.sockets[hostTarget].forEach(function(socket) { + // normally, there are 2 internal listeners and 1 needle sets up, + // but to be sure the test does not fail even if newer node versions + // introduce additional listeners, we use a higher limit. 
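+            // assertion failures are stashed in last_error so that done()
+            // can still report them once all `count` requests have finished.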
+ try { + socket.listeners('end').length.should.be.below(5, "too many listeners on the socket object's end event"); + } catch (e) { + last_error = e; + } + }); + } + + completed(); + }); + } + + for (var i = 0; i < count; i++) { + send(); + } + }); + }); +}); diff --git a/node_modules/needle/test/url_spec.js b/node_modules/needle/test/url_spec.js new file mode 100644 index 00000000..8a0fa3e6 --- /dev/null +++ b/node_modules/needle/test/url_spec.js @@ -0,0 +1,155 @@ +var needle = require('../'), + sinon = require('sinon'), + should = require('should'), + http = require('http'), + helpers = require('./helpers'); + +var port = 3456; + +describe('urls', function() { + + var server, url; + + function send_request(cb) { + return needle.get(url, cb); + } + + before(function(done){ + server = helpers.server({ port: port }, done); + }) + + after(function(done) { + server.close(done); + }) + + describe('null URL', function(){ + + it('throws', function(){ + (function() { + send_request() + }).should.throw(); + }) + + }) + + describe('invalid protocol', function(){ + + before(function() { + url = 'foo://google.com/what' + }) + + it('does not throw', function(done) { + (function() { + send_request(function(err) { + done(); + }) + }).should.not.throw() + }) + + it('returns an error', function(done) { + send_request(function(err) { + err.should.be.an.Error; + err.code.should.match(/ENOTFOUND|EADDRINFO|EAI_AGAIN/) + done(); + }) + }) + + }) + + describe('invalid host', function(){ + + before(function() { + url = 'http://s1\\\2.com/' + }) + + it('fails', function(done) { + (function() { + send_request(function(){ }) + }.should.throw(TypeError)) + done() + }) + + }) + +/* + describe('invalid path', function(){ + + before(function() { + url = 'http://www.google.com\\\/x\\\ %^&*() /x2.com/' + }) + + it('fails', function(done) { + send_request(function(err) { + err.should.be.an.Error; + done(); + }) + }) + + }) +*/ + + describe('valid protocol and path', function() { + + before(function() { + url = 'http://localhost:' + port + '/foo'; + }) + + it('works', function(done) { + send_request(function(err){ + should.not.exist(err); + done(); + }) + }) + + }) + + describe('no protocol but with slashes and valid path', function() { + + before(function() { + url = '//localhost:' + port + '/foo'; + }) + + it('works', function(done) { + send_request(function(err){ + should.not.exist(err); + done(); + }) + }) + + }) + + describe('no protocol nor slashes and valid path', function() { + + before(function() { + url = 'localhost:' + port + '/foo'; + }) + + it('works', function(done) { + send_request(function(err){ + should.not.exist(err); + done(); + }) + }) + + }) + + describe('double encoding', function() { + + var path = '/foo?email=' + encodeURIComponent('what-ever@Example.Com'); + + before(function() { + url = 'localhost:' + port + path + }); + + it('should not occur', function(done) { + send_request(function(err, res) { + should.not.exist(err); + should(res.req.path).be.exactly(path); + done(); + }); + + }); + + }) + +}) diff --git a/node_modules/needle/test/utils/formidable.js b/node_modules/needle/test/utils/formidable.js new file mode 100644 index 00000000..ba1d983e --- /dev/null +++ b/node_modules/needle/test/utils/formidable.js @@ -0,0 +1,17 @@ +var formidable = require('formidable'), + http = require('http'), + util = require('util'); + +var port = process.argv[2] || 8888; + +http.createServer(function(req, res) { + var form = new formidable.IncomingForm(); + form.parse(req, function(err, fields, files) { + 
res.writeHead(200, {'content-type': 'text/plain'}); + res.write('received upload:\n\n'); + console.log(util.inspect({fields: fields, files: files})) + res.end(util.inspect({fields: fields, files: files})); + }); +}).listen(port); + +console.log('HTTP server listening on port ' + port); \ No newline at end of file diff --git a/node_modules/needle/test/utils/proxy.js b/node_modules/needle/test/utils/proxy.js new file mode 100644 index 00000000..531bf493 --- /dev/null +++ b/node_modules/needle/test/utils/proxy.js @@ -0,0 +1,62 @@ +var http = require('http'), + https = require('https'), + url = require('url'); + +var port = 1234, + log = true, + request_auth = false; + +http.createServer(function(request, response) { + + console.log(request.headers); + console.log("Got request: " + request.url); + console.log("Forwarding request to " + request.headers['host']); + + if (request_auth) { + if (!request.headers['proxy-authorization']) { + response.writeHead(407, {'Proxy-Authenticate': 'Basic realm="proxy.com"'}) + return response.end('Hello.'); + } + } + + var remote = url.parse(request.url); + var protocol = remote.protocol == 'https:' ? https : http; + + var opts = { + host: request.headers['host'], + port: remote.port || (remote.protocol == 'https:' ? 443 : 80), + method: request.method, + path: remote.pathname, + headers: request.headers + } + + var proxy_request = protocol.request(opts, function(proxy_response){ + + proxy_response.on('data', function(chunk) { + if (log) console.log(chunk.toString()); + response.write(chunk, 'binary'); + }); + proxy_response.on('end', function() { + response.end(); + }); + + response.writeHead(proxy_response.statusCode, proxy_response.headers); + }); + + request.on('data', function(chunk) { + if (log) console.log(chunk.toString()); + proxy_request.write(chunk, 'binary'); + }); + + request.on('end', function() { + proxy_request.end(); + }); + +}).listen(port); + +process.on('uncaughtException', function(err){ + console.log('Uncaught exception!'); + console.log(err); +}); + +console.log("Proxy server listening on port " + port); diff --git a/node_modules/needle/test/utils/test.js b/node_modules/needle/test/utils/test.js new file mode 100644 index 00000000..8d58d70f --- /dev/null +++ b/node_modules/needle/test/utils/test.js @@ -0,0 +1,104 @@ +// TODO: write specs. 
:) + +var fs = require('fs'), + client = require('./../../'); + +process.env.DEBUG = true; + +var response_callback = function(err, resp, body){ + console.log(err); + if(resp) console.log("Got status code " + resp.statusCode) + console.log(body); +} + +function simple_head(){ + client.head('http://www.amazon.com', response_callback); +} + +function simple_get(){ + client.get('http://www.nodejs.org', response_callback); +} + +function proxy_get(){ + client.get('https://www.google.com/search?q=nodejs', {proxy: 'http://localhost:1234'}, response_callback); +} + +function auth_get(){ + client.get('https://www.twitter.com', {username: 'asd', password: '123'}, response_callback); +} + +function simple_post(url){ + + var data = { + foo: 'bar', + baz: { + nested: 'attribute' + } + } + + client.post(url, data, response_callback); + +} + +function multipart_post(url){ + + var filename = 'test_file.txt'; + var data = 'Plain text data.\nLorem ipsum dolor sit amet.\nBla bla bla.\n'; + fs.writeFileSync(filename, data); + + var black_pixel = Buffer.from("data:image/gif;base64,R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs=".replace(/^data:image\/\w+;base64,/, ""), "base64"); + + var data = { + foo: 'bar', + bar: 'baz', + nested: { + my_document: { file: filename, content_type: 'text/plain' }, + even: { + more: 'nesting' + } + }, + pixel: { filename: 'black_pixel.gif', buffer: black_pixel, content_type: 'image/gif' }, + field2: {value: JSON.stringify({"json":[ {"one":1}, {"two":2} ]}), content_type: 'application/json' } + } + + client.post(url, data, {multipart: true}, function(err, resp, body){ + + console.log(err); + console.log("Got status code " + resp.statusCode) + console.log(body); + fs.unlink(filename); + + }); + +} + +switch(process.argv[2]){ + case 'head': + simple_head(); + break; + case 'get': + simple_get(); + break; + case 'auth': + auth_get(); + break; + case 'proxy': + proxy_get(); + break; + case 'post': + simple_post(process.argv[3] || 'http://posttestserver.com/post.php'); + break; + case 'multipart': + multipart_post(process.argv[3] || 'http://posttestserver.com/post.php?dir=example'); + break; + case 'all': + simple_head(); + simple_get(); + auth_get(); + proxy_get(); + simple_post(process.argv[3] || 'http://posttestserver.com/post.php'); + multipart_post(process.argv[3] || 'http://posttestserver.com/post.php?dir=example'); + break; + default: + console.log("Usage: ./test.js [head|get|auth|proxy|multipart]") +} diff --git a/node_modules/negotiator/HISTORY.md b/node_modules/negotiator/HISTORY.md new file mode 100644 index 00000000..6d06c76a --- /dev/null +++ b/node_modules/negotiator/HISTORY.md @@ -0,0 +1,103 @@ +0.6.2 / 2019-04-29 +================== + + * Fix sorting charset, encoding, and language with extra parameters + +0.6.1 / 2016-05-02 +================== + + * perf: improve `Accept` parsing speed + * perf: improve `Accept-Charset` parsing speed + * perf: improve `Accept-Encoding` parsing speed + * perf: improve `Accept-Language` parsing speed + +0.6.0 / 2015-09-29 +================== + + * Fix including type extensions in parameters in `Accept` parsing + * Fix parsing `Accept` parameters with quoted equals + * Fix parsing `Accept` parameters with quoted semicolons + * Lazy-load modules from main entry point + * perf: delay type concatenation until needed + * perf: enable strict mode + * perf: hoist regular expressions + * perf: remove closures getting spec properties + * perf: remove a closure from media type parsing + * perf: remove property delete from media type parsing + 
+0.5.3 / 2015-05-10 +================== + + * Fix media type parameter matching to be case-insensitive + +0.5.2 / 2015-05-06 +================== + + * Fix comparing media types with quoted values + * Fix splitting media types with quoted commas + +0.5.1 / 2015-02-14 +================== + + * Fix preference sorting to be stable for long acceptable lists + +0.5.0 / 2014-12-18 +================== + + * Fix list return order when large accepted list + * Fix missing identity encoding when q=0 exists + * Remove dynamic building of Negotiator class + +0.4.9 / 2014-10-14 +================== + + * Fix error when media type has invalid parameter + +0.4.8 / 2014-09-28 +================== + + * Fix all negotiations to be case-insensitive + * Stable sort preferences of same quality according to client order + * Support Node.js 0.6 + +0.4.7 / 2014-06-24 +================== + + * Handle invalid provided languages + * Handle invalid provided media types + +0.4.6 / 2014-06-11 +================== + + * Order by specificity when quality is the same + +0.4.5 / 2014-05-29 +================== + + * Fix regression in empty header handling + +0.4.4 / 2014-05-29 +================== + + * Fix behaviors when headers are not present + +0.4.3 / 2014-04-16 +================== + + * Handle slashes on media params correctly + +0.4.2 / 2014-02-28 +================== + + * Fix media type sorting + * Handle media types params strictly + +0.4.1 / 2014-01-16 +================== + + * Use most specific matches + +0.4.0 / 2014-01-09 +================== + + * Remove preferred prefix from methods diff --git a/node_modules/negotiator/LICENSE b/node_modules/negotiator/LICENSE new file mode 100644 index 00000000..ea6b9e2e --- /dev/null +++ b/node_modules/negotiator/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2012-2014 Federico Romero +Copyright (c) 2012-2014 Isaac Z. Schlueter +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/negotiator/README.md b/node_modules/negotiator/README.md new file mode 100644 index 00000000..04a67ff7 --- /dev/null +++ b/node_modules/negotiator/README.md @@ -0,0 +1,203 @@ +# negotiator + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +An HTTP content negotiator for Node.js + +## Installation + +```sh +$ npm install negotiator +``` + +## API + +```js +var Negotiator = require('negotiator') +``` + +### Accept Negotiation + +```js +availableMediaTypes = ['text/html', 'text/plain', 'application/json'] + +// The negotiator constructor receives a request object +negotiator = new Negotiator(request) + +// Let's say Accept header is 'text/html, application/*;q=0.2, image/jpeg;q=0.8' + +negotiator.mediaTypes() +// -> ['text/html', 'image/jpeg', 'application/*'] + +negotiator.mediaTypes(availableMediaTypes) +// -> ['text/html', 'application/json'] + +negotiator.mediaType(availableMediaTypes) +// -> 'text/html' +``` + +You can check a working example at `examples/accept.js`. + +#### Methods + +##### mediaType() + +Returns the most preferred media type from the client. + +##### mediaType(availableMediaType) + +Returns the most preferred media type from a list of available media types. + +##### mediaTypes() + +Returns an array of preferred media types ordered by the client preference. + +##### mediaTypes(availableMediaTypes) + +Returns an array of preferred media types ordered by priority from a list of +available media types. + +### Accept-Language Negotiation + +```js +negotiator = new Negotiator(request) + +availableLanguages = ['en', 'es', 'fr'] + +// Let's say Accept-Language header is 'en;q=0.8, es, pt' + +negotiator.languages() +// -> ['es', 'pt', 'en'] + +negotiator.languages(availableLanguages) +// -> ['es', 'en'] + +language = negotiator.language(availableLanguages) +// -> 'es' +``` + +You can check a working example at `examples/language.js`. + +#### Methods + +##### language() + +Returns the most preferred language from the client. + +##### language(availableLanguages) + +Returns the most preferred language from a list of available languages. + +##### languages() + +Returns an array of preferred languages ordered by the client preference. + +##### languages(availableLanguages) + +Returns an array of preferred languages ordered by priority from a list of +available languages. + +### Accept-Charset Negotiation + +```js +availableCharsets = ['utf-8', 'iso-8859-1', 'iso-8859-5'] + +negotiator = new Negotiator(request) + +// Let's say Accept-Charset header is 'utf-8, iso-8859-1;q=0.8, utf-7;q=0.2' + +negotiator.charsets() +// -> ['utf-8', 'iso-8859-1', 'utf-7'] + +negotiator.charsets(availableCharsets) +// -> ['utf-8', 'iso-8859-1'] + +negotiator.charset(availableCharsets) +// -> 'utf-8' +``` + +You can check a working example at `examples/charset.js`. + +#### Methods + +##### charset() + +Returns the most preferred charset from the client. + +##### charset(availableCharsets) + +Returns the most preferred charset from a list of available charsets. + +##### charsets() + +Returns an array of preferred charsets ordered by the client preference. + +##### charsets(availableCharsets) + +Returns an array of preferred charsets ordered by priority from a list of +available charsets. 
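+
+As a minimal sketch, assuming a plain Node.js `http` server and an illustrative
+list of available charsets, the charset methods can be combined like so:
+
+```js
+var http = require('http')
+var Negotiator = require('negotiator')
+
+var availableCharsets = ['utf-8', 'iso-8859-1']
+
+http.createServer(function (req, res) {
+  // pick the client's most preferred charset among the ones we can produce
+  var charset = new Negotiator(req).charset(availableCharsets)
+
+  if (!charset) {
+    // nothing acceptable was offered by the client
+    res.writeHead(406)
+    return res.end()
+  }
+
+  res.writeHead(200, { 'Content-Type': 'text/plain; charset=' + charset })
+  res.end('negotiated charset: ' + charset)
+}).listen(3000)
+```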
+ +### Accept-Encoding Negotiation + +```js +availableEncodings = ['identity', 'gzip'] + +negotiator = new Negotiator(request) + +// Let's say Accept-Encoding header is 'gzip, compress;q=0.2, identity;q=0.5' + +negotiator.encodings() +// -> ['gzip', 'identity', 'compress'] + +negotiator.encodings(availableEncodings) +// -> ['gzip', 'identity'] + +negotiator.encoding(availableEncodings) +// -> 'gzip' +``` + +You can check a working example at `examples/encoding.js`. + +#### Methods + +##### encoding() + +Returns the most preferred encoding from the client. + +##### encoding(availableEncodings) + +Returns the most preferred encoding from a list of available encodings. + +##### encodings() + +Returns an array of preferred encodings ordered by the client preference. + +##### encodings(availableEncodings) + +Returns an array of preferred encodings ordered by priority from a list of +available encodings. + +## See Also + +The [accepts](https://npmjs.org/package/accepts#readme) module builds on +this module and provides an alternative interface, mime type validation, +and more. + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/negotiator.svg +[npm-url]: https://npmjs.org/package/negotiator +[node-version-image]: https://img.shields.io/node/v/negotiator.svg +[node-version-url]: https://nodejs.org/en/download/ +[travis-image]: https://img.shields.io/travis/jshttp/negotiator/master.svg +[travis-url]: https://travis-ci.org/jshttp/negotiator +[coveralls-image]: https://img.shields.io/coveralls/jshttp/negotiator/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/negotiator?branch=master +[downloads-image]: https://img.shields.io/npm/dm/negotiator.svg +[downloads-url]: https://npmjs.org/package/negotiator diff --git a/node_modules/negotiator/index.js b/node_modules/negotiator/index.js new file mode 100644 index 00000000..8d4f6a22 --- /dev/null +++ b/node_modules/negotiator/index.js @@ -0,0 +1,124 @@ +/*! + * negotiator + * Copyright(c) 2012 Federico Romero + * Copyright(c) 2012-2014 Isaac Z. Schlueter + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Cached loaded submodules. + * @private + */ + +var modules = Object.create(null); + +/** + * Module exports. + * @public + */ + +module.exports = Negotiator; +module.exports.Negotiator = Negotiator; + +/** + * Create a Negotiator instance from a request. 
+ * @param {object} request + * @public + */ + +function Negotiator(request) { + if (!(this instanceof Negotiator)) { + return new Negotiator(request); + } + + this.request = request; +} + +Negotiator.prototype.charset = function charset(available) { + var set = this.charsets(available); + return set && set[0]; +}; + +Negotiator.prototype.charsets = function charsets(available) { + var preferredCharsets = loadModule('charset').preferredCharsets; + return preferredCharsets(this.request.headers['accept-charset'], available); +}; + +Negotiator.prototype.encoding = function encoding(available) { + var set = this.encodings(available); + return set && set[0]; +}; + +Negotiator.prototype.encodings = function encodings(available) { + var preferredEncodings = loadModule('encoding').preferredEncodings; + return preferredEncodings(this.request.headers['accept-encoding'], available); +}; + +Negotiator.prototype.language = function language(available) { + var set = this.languages(available); + return set && set[0]; +}; + +Negotiator.prototype.languages = function languages(available) { + var preferredLanguages = loadModule('language').preferredLanguages; + return preferredLanguages(this.request.headers['accept-language'], available); +}; + +Negotiator.prototype.mediaType = function mediaType(available) { + var set = this.mediaTypes(available); + return set && set[0]; +}; + +Negotiator.prototype.mediaTypes = function mediaTypes(available) { + var preferredMediaTypes = loadModule('mediaType').preferredMediaTypes; + return preferredMediaTypes(this.request.headers.accept, available); +}; + +// Backwards compatibility +Negotiator.prototype.preferredCharset = Negotiator.prototype.charset; +Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets; +Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding; +Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings; +Negotiator.prototype.preferredLanguage = Negotiator.prototype.language; +Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages; +Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType; +Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes; + +/** + * Load the given module. + * @private + */ + +function loadModule(moduleName) { + var module = modules[moduleName]; + + if (module !== undefined) { + return module; + } + + // This uses a switch for static require analysis + switch (moduleName) { + case 'charset': + module = require('./lib/charset'); + break; + case 'encoding': + module = require('./lib/encoding'); + break; + case 'language': + module = require('./lib/language'); + break; + case 'mediaType': + module = require('./lib/mediaType'); + break; + default: + throw new Error('Cannot find module \'' + moduleName + '\''); + } + + // Store to prevent invoking require() + modules[moduleName] = module; + + return module; +} diff --git a/node_modules/negotiator/lib/charset.js b/node_modules/negotiator/lib/charset.js new file mode 100644 index 00000000..cdd01480 --- /dev/null +++ b/node_modules/negotiator/lib/charset.js @@ -0,0 +1,169 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = preferredCharsets; +module.exports.preferredCharsets = preferredCharsets; + +/** + * Module variables. 
+ * @private + */ + +var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/; + +/** + * Parse the Accept-Charset header. + * @private + */ + +function parseAcceptCharset(accept) { + var accepts = accept.split(','); + + for (var i = 0, j = 0; i < accepts.length; i++) { + var charset = parseCharset(accepts[i].trim(), i); + + if (charset) { + accepts[j++] = charset; + } + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse a charset from the Accept-Charset header. + * @private + */ + +function parseCharset(str, i) { + var match = simpleCharsetRegExp.exec(str); + if (!match) return null; + + var charset = match[1]; + var q = 1; + if (match[2]) { + var params = match[2].split(';') + for (var j = 0; j < params.length; j++) { + var p = params[j].trim().split('='); + if (p[0] === 'q') { + q = parseFloat(p[1]); + break; + } + } + } + + return { + charset: charset, + q: q, + i: i + }; +} + +/** + * Get the priority of a charset. + * @private + */ + +function getCharsetPriority(charset, accepted, index) { + var priority = {o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(charset, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the charset. + * @private + */ + +function specify(charset, spec, index) { + var s = 0; + if(spec.charset.toLowerCase() === charset.toLowerCase()){ + s |= 1; + } else if (spec.charset !== '*' ) { + return null + } + + return { + i: index, + o: spec.i, + q: spec.q, + s: s + } +} + +/** + * Get the preferred charsets from an Accept-Charset header. + * @public + */ + +function preferredCharsets(accept, provided) { + // RFC 2616 sec 14.2: no header = * + var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || ''); + + if (!provided) { + // sorted list of all charsets + return accepts + .filter(isQuality) + .sort(compareSpecs) + .map(getFullCharset); + } + + var priorities = provided.map(function getPriority(type, index) { + return getCharsetPriority(type, accepts, index); + }); + + // sorted list of accepted charsets + return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. + * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; +} + +/** + * Get full charset string. + * @private + */ + +function getFullCharset(spec) { + return spec.charset; +} + +/** + * Check if a spec has any quality. + * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} diff --git a/node_modules/negotiator/lib/encoding.js b/node_modules/negotiator/lib/encoding.js new file mode 100644 index 00000000..8432cd77 --- /dev/null +++ b/node_modules/negotiator/lib/encoding.js @@ -0,0 +1,184 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = preferredEncodings; +module.exports.preferredEncodings = preferredEncodings; + +/** + * Module variables. + * @private + */ + +var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/; + +/** + * Parse the Accept-Encoding header. 
+ * @private + */ + +function parseAcceptEncoding(accept) { + var accepts = accept.split(','); + var hasIdentity = false; + var minQuality = 1; + + for (var i = 0, j = 0; i < accepts.length; i++) { + var encoding = parseEncoding(accepts[i].trim(), i); + + if (encoding) { + accepts[j++] = encoding; + hasIdentity = hasIdentity || specify('identity', encoding); + minQuality = Math.min(minQuality, encoding.q || 1); + } + } + + if (!hasIdentity) { + /* + * If identity doesn't explicitly appear in the accept-encoding header, + * it's added to the list of acceptable encoding with the lowest q + */ + accepts[j++] = { + encoding: 'identity', + q: minQuality, + i: i + }; + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse an encoding from the Accept-Encoding header. + * @private + */ + +function parseEncoding(str, i) { + var match = simpleEncodingRegExp.exec(str); + if (!match) return null; + + var encoding = match[1]; + var q = 1; + if (match[2]) { + var params = match[2].split(';'); + for (var j = 0; j < params.length; j++) { + var p = params[j].trim().split('='); + if (p[0] === 'q') { + q = parseFloat(p[1]); + break; + } + } + } + + return { + encoding: encoding, + q: q, + i: i + }; +} + +/** + * Get the priority of an encoding. + * @private + */ + +function getEncodingPriority(encoding, accepted, index) { + var priority = {o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(encoding, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the encoding. + * @private + */ + +function specify(encoding, spec, index) { + var s = 0; + if(spec.encoding.toLowerCase() === encoding.toLowerCase()){ + s |= 1; + } else if (spec.encoding !== '*' ) { + return null + } + + return { + i: index, + o: spec.i, + q: spec.q, + s: s + } +}; + +/** + * Get the preferred encodings from an Accept-Encoding header. + * @public + */ + +function preferredEncodings(accept, provided) { + var accepts = parseAcceptEncoding(accept || ''); + + if (!provided) { + // sorted list of all encodings + return accepts + .filter(isQuality) + .sort(compareSpecs) + .map(getFullEncoding); + } + + var priorities = provided.map(function getPriority(type, index) { + return getEncodingPriority(type, accepts, index); + }); + + // sorted list of accepted encodings + return priorities.filter(isQuality).sort(compareSpecs).map(function getEncoding(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. + * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; +} + +/** + * Get full encoding string. + * @private + */ + +function getFullEncoding(spec) { + return spec.encoding; +} + +/** + * Check if a spec has any quality. + * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} diff --git a/node_modules/negotiator/lib/language.js b/node_modules/negotiator/lib/language.js new file mode 100644 index 00000000..62f737f0 --- /dev/null +++ b/node_modules/negotiator/lib/language.js @@ -0,0 +1,179 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. 
+ * @public + */ + +module.exports = preferredLanguages; +module.exports.preferredLanguages = preferredLanguages; + +/** + * Module variables. + * @private + */ + +var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/; + +/** + * Parse the Accept-Language header. + * @private + */ + +function parseAcceptLanguage(accept) { + var accepts = accept.split(','); + + for (var i = 0, j = 0; i < accepts.length; i++) { + var language = parseLanguage(accepts[i].trim(), i); + + if (language) { + accepts[j++] = language; + } + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse a language from the Accept-Language header. + * @private + */ + +function parseLanguage(str, i) { + var match = simpleLanguageRegExp.exec(str); + if (!match) return null; + + var prefix = match[1], + suffix = match[2], + full = prefix; + + if (suffix) full += "-" + suffix; + + var q = 1; + if (match[3]) { + var params = match[3].split(';') + for (var j = 0; j < params.length; j++) { + var p = params[j].split('='); + if (p[0] === 'q') q = parseFloat(p[1]); + } + } + + return { + prefix: prefix, + suffix: suffix, + q: q, + i: i, + full: full + }; +} + +/** + * Get the priority of a language. + * @private + */ + +function getLanguagePriority(language, accepted, index) { + var priority = {o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(language, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the language. + * @private + */ + +function specify(language, spec, index) { + var p = parseLanguage(language) + if (!p) return null; + var s = 0; + if(spec.full.toLowerCase() === p.full.toLowerCase()){ + s |= 4; + } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) { + s |= 2; + } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) { + s |= 1; + } else if (spec.full !== '*' ) { + return null + } + + return { + i: index, + o: spec.i, + q: spec.q, + s: s + } +}; + +/** + * Get the preferred languages from an Accept-Language header. + * @public + */ + +function preferredLanguages(accept, provided) { + // RFC 2616 sec 14.4: no header = * + var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || ''); + + if (!provided) { + // sorted list of all languages + return accepts + .filter(isQuality) + .sort(compareSpecs) + .map(getFullLanguage); + } + + var priorities = provided.map(function getPriority(type, index) { + return getLanguagePriority(type, accepts, index); + }); + + // sorted list of accepted languages + return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. + * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; +} + +/** + * Get full language string. + * @private + */ + +function getFullLanguage(spec) { + return spec.full; +} + +/** + * Check if a spec has any quality. + * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} diff --git a/node_modules/negotiator/lib/mediaType.js b/node_modules/negotiator/lib/mediaType.js new file mode 100644 index 00000000..67309dd7 --- /dev/null +++ b/node_modules/negotiator/lib/mediaType.js @@ -0,0 +1,294 @@ +/** + * negotiator + * Copyright(c) 2012 Isaac Z. 
Schlueter + * Copyright(c) 2014 Federico Romero + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = preferredMediaTypes; +module.exports.preferredMediaTypes = preferredMediaTypes; + +/** + * Module variables. + * @private + */ + +var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/; + +/** + * Parse the Accept header. + * @private + */ + +function parseAccept(accept) { + var accepts = splitMediaTypes(accept); + + for (var i = 0, j = 0; i < accepts.length; i++) { + var mediaType = parseMediaType(accepts[i].trim(), i); + + if (mediaType) { + accepts[j++] = mediaType; + } + } + + // trim accepts + accepts.length = j; + + return accepts; +} + +/** + * Parse a media type from the Accept header. + * @private + */ + +function parseMediaType(str, i) { + var match = simpleMediaTypeRegExp.exec(str); + if (!match) return null; + + var params = Object.create(null); + var q = 1; + var subtype = match[2]; + var type = match[1]; + + if (match[3]) { + var kvps = splitParameters(match[3]).map(splitKeyValuePair); + + for (var j = 0; j < kvps.length; j++) { + var pair = kvps[j]; + var key = pair[0].toLowerCase(); + var val = pair[1]; + + // get the value, unwrapping quotes + var value = val && val[0] === '"' && val[val.length - 1] === '"' + ? val.substr(1, val.length - 2) + : val; + + if (key === 'q') { + q = parseFloat(value); + break; + } + + // store parameter + params[key] = value; + } + } + + return { + type: type, + subtype: subtype, + params: params, + q: q, + i: i + }; +} + +/** + * Get the priority of a media type. + * @private + */ + +function getMediaTypePriority(type, accepted, index) { + var priority = {o: -1, q: 0, s: 0}; + + for (var i = 0; i < accepted.length; i++) { + var spec = specify(type, accepted[i], index); + + if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) { + priority = spec; + } + } + + return priority; +} + +/** + * Get the specificity of the media type. + * @private + */ + +function specify(type, spec, index) { + var p = parseMediaType(type); + var s = 0; + + if (!p) { + return null; + } + + if(spec.type.toLowerCase() == p.type.toLowerCase()) { + s |= 4 + } else if(spec.type != '*') { + return null; + } + + if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) { + s |= 2 + } else if(spec.subtype != '*') { + return null; + } + + var keys = Object.keys(spec.params); + if (keys.length > 0) { + if (keys.every(function (k) { + return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase(); + })) { + s |= 1 + } else { + return null + } + } + + return { + i: index, + o: spec.i, + q: spec.q, + s: s, + } +} + +/** + * Get the preferred media types from an Accept header. + * @public + */ + +function preferredMediaTypes(accept, provided) { + // RFC 2616 sec 14.2: no header = */* + var accepts = parseAccept(accept === undefined ? '*/*' : accept || ''); + + if (!provided) { + // sorted list of all types + return accepts + .filter(isQuality) + .sort(compareSpecs) + .map(getFullType); + } + + var priorities = provided.map(function getPriority(type, index) { + return getMediaTypePriority(type, accepts, index); + }); + + // sorted list of accepted types + return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) { + return provided[priorities.indexOf(priority)]; + }); +} + +/** + * Compare two specs. 
+ * @private + */ + +function compareSpecs(a, b) { + return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0; +} + +/** + * Get full type string. + * @private + */ + +function getFullType(spec) { + return spec.type + '/' + spec.subtype; +} + +/** + * Check if a spec has any quality. + * @private + */ + +function isQuality(spec) { + return spec.q > 0; +} + +/** + * Count the number of quotes in a string. + * @private + */ + +function quoteCount(string) { + var count = 0; + var index = 0; + + while ((index = string.indexOf('"', index)) !== -1) { + count++; + index++; + } + + return count; +} + +/** + * Split a key value pair. + * @private + */ + +function splitKeyValuePair(str) { + var index = str.indexOf('='); + var key; + var val; + + if (index === -1) { + key = str; + } else { + key = str.substr(0, index); + val = str.substr(index + 1); + } + + return [key, val]; +} + +/** + * Split an Accept header into media types. + * @private + */ + +function splitMediaTypes(accept) { + var accepts = accept.split(','); + + for (var i = 1, j = 0; i < accepts.length; i++) { + if (quoteCount(accepts[j]) % 2 == 0) { + accepts[++j] = accepts[i]; + } else { + accepts[j] += ',' + accepts[i]; + } + } + + // trim accepts + accepts.length = j + 1; + + return accepts; +} + +/** + * Split a string of parameters. + * @private + */ + +function splitParameters(str) { + var parameters = str.split(';'); + + for (var i = 1, j = 0; i < parameters.length; i++) { + if (quoteCount(parameters[j]) % 2 == 0) { + parameters[++j] = parameters[i]; + } else { + parameters[j] += ';' + parameters[i]; + } + } + + // trim parameters + parameters.length = j + 1; + + for (var i = 0; i < parameters.length; i++) { + parameters[i] = parameters[i].trim(); + } + + return parameters; +} diff --git a/node_modules/negotiator/package.json b/node_modules/negotiator/package.json new file mode 100644 index 00000000..0c7ff3c2 --- /dev/null +++ b/node_modules/negotiator/package.json @@ -0,0 +1,42 @@ +{ + "name": "negotiator", + "description": "HTTP content negotiation", + "version": "0.6.2", + "contributors": [ + "Douglas Christopher Wilson ", + "Federico Romero ", + "Isaac Z. 
Schlueter (http://blog.izs.me/)" + ], + "license": "MIT", + "keywords": [ + "http", + "content negotiation", + "accept", + "accept-language", + "accept-encoding", + "accept-charset" + ], + "repository": "jshttp/negotiator", + "devDependencies": { + "eslint": "5.16.0", + "eslint-plugin-markdown": "1.0.0", + "mocha": "6.1.4", + "nyc": "14.0.0" + }, + "files": [ + "lib/", + "HISTORY.md", + "LICENSE", + "index.js", + "README.md" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "test-travis": "nyc --reporter=text npm test" + } +} diff --git a/node_modules/node-modules-regexp/index.js b/node_modules/node-modules-regexp/index.js new file mode 100644 index 00000000..3ce8f021 --- /dev/null +++ b/node_modules/node-modules-regexp/index.js @@ -0,0 +1,2 @@ +'use strict'; +module.exports = /^(?:.*[\\\/])?node_modules(?:[\\\/].*)?$/; diff --git a/node_modules/node-modules-regexp/license b/node_modules/node-modules-regexp/license new file mode 100644 index 00000000..ad5d021e --- /dev/null +++ b/node_modules/node-modules-regexp/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) James Talmage (github.com/jamestalmage) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
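The four `preferred*` helpers added in the `negotiator/lib` hunks above all follow the same pattern: parse the header into small specs carrying a value, a quality `q`, and an index `i`, score each provided value for specificity and quality, then sort with `compareSpecs`. Below is a minimal sketch of exercising two of them directly, assuming this patch is applied so the `negotiator/lib/*` files are resolvable; the header strings and provided lists are made-up examples, and real code would normally go through the package's public API rather than the lib files.

```js
// Requires resolve against the vendored files added in the hunks above.
var preferredMediaTypes = require('negotiator/lib/mediaType');
var preferredLanguages = require('negotiator/lib/language');

// Hypothetical client headers.
var accept = 'text/html;q=0.9, application/json, */*;q=0.1';
var acceptLanguage = 'pt-BR, pt;q=0.9, en;q=0.5';

// Rank the representations the server can actually produce.
console.log(preferredMediaTypes(accept, ['application/json', 'text/html']));
// -> ['application/json', 'text/html']  (q=1 beats q=0.9)

console.log(preferredLanguages(acceptLanguage, ['en', 'pt']));
// -> ['pt', 'en']  ('pt' is accepted at q=0.9, 'en' only at q=0.5)
```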
diff --git a/node_modules/node-modules-regexp/package.json b/node_modules/node-modules-regexp/package.json new file mode 100644 index 00000000..fd7f4a70 --- /dev/null +++ b/node_modules/node-modules-regexp/package.json @@ -0,0 +1,44 @@ +{ + "name": "node-modules-regexp", + "version": "1.0.0", + "description": "A regular expression for file paths that contain a `node_modules` folder.", + "license": "MIT", + "repository": "jamestalmage/node-modules-regexp", + "author": { + "name": "James Talmage", + "email": "james@talmage.io", + "url": "github.com/jamestalmage" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "node_modules", + "regular expression", + "regular expressions", + "regular", + "expression", + "expressions", + "exclude", + "include", + "ignore", + "node", + "module" + ], + "dependencies": {}, + "devDependencies": { + "ava": "^0.7.0", + "xo": "^0.11.2" + }, + "xo": { + "ignores": [ + "test.js" + ] + } +} diff --git a/node_modules/node-modules-regexp/readme.md b/node_modules/node-modules-regexp/readme.md new file mode 100644 index 00000000..c0b28a52 --- /dev/null +++ b/node_modules/node-modules-regexp/readme.md @@ -0,0 +1,32 @@ +# node-modules-regexp [![Build Status](https://travis-ci.org/jamestalmage/node-modules-regexp.svg?branch=master)](https://travis-ci.org/jamestalmage/node-modules-regexp) + +> A regular expression for file paths that contain a `node_modules` folder. + + +## Install + +``` +$ npm install --save node-modules-regexp +``` + + +## Usage + +```js +const nodeModules = require('node-modules-regexp'); + +nodeModules.test('/foo/node_modules/bar.js'); +//=> true + +nodeModules.test('/foo/bar.js'); +//=> false +``` + + +## API + +The returned value is a regular expression, [soooo....](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp). + +## License + +MIT © [James Talmage](http://github.com/jamestalmage) diff --git a/node_modules/node-pre-gyp/CHANGELOG.md b/node_modules/node-pre-gyp/CHANGELOG.md new file mode 100644 index 00000000..bed8edc8 --- /dev/null +++ b/node_modules/node-pre-gyp/CHANGELOG.md @@ -0,0 +1,432 @@ +# node-pre-gyp changelog + +## 0.12.0 + +- Fixed double-build problem with node v10 (https://github.com/mapbox/node-pre-gyp/pull/428) +- Added node 11 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/422) + +## 0.11.0 + +- Fixed double-install problem with node v10 +- Significant N-API improvements (https://github.com/mapbox/node-pre-gyp/pull/405) + +## 0.10.3 + +- Now will use `request` over `needle` if request is installed. By default `needle` is used for `https`. 
This should unbreak proxy support that regressed in v0.9.0 + +## 0.10.2 + +- Fixed rc/deep-extent security vulnerability +- Fixed broken reinstall script do to incorrectly named get_best_napi_version + +## 0.10.1 + +- Fix needle error event (@medns) + +## 0.10.0 + +- Allow for a single-level module path when packing @allenluce (https://github.com/mapbox/node-pre-gyp/pull/371) +- Log warnings instead of errors when falling back @xzyfer (https://github.com/mapbox/node-pre-gyp/pull/366) +- Add Node.js v10 support to tests (https://github.com/mapbox/node-pre-gyp/pull/372) +- Remove retire.js from CI (https://github.com/mapbox/node-pre-gyp/pull/372) +- Remove support for Node.js v4 due to [EOL on April 30th, 2018](https://github.com/nodejs/Release/blob/7dd52354049cae99eed0e9fe01345b0722a86fde/schedule.json#L14) +- Update appveyor tests to install default NPM version instead of NPM v2.x for all Windows builds (https://github.com/mapbox/node-pre-gyp/pull/375) + +## 0.9.1 + +- Fixed regression (in v0.9.0) with support for http redirects @allenluce (https://github.com/mapbox/node-pre-gyp/pull/361) + +## 0.9.0 + +- Switched from using `request` to `needle` to reduce size of module deps (https://github.com/mapbox/node-pre-gyp/pull/350) + +## 0.8.0 + +- N-API support (@inspiredware) + +## 0.7.1 + +- Upgraded to tar v4.x + +## 0.7.0 + + - Updated request and hawk (#347) + - Dropped node v0.10.x support + +## 0.6.40 + + - Improved error reporting if an install fails + +## 0.6.39 + + - Support for node v9 + - Support for versioning on `{libc}` to allow binaries to work on non-glic linux systems like alpine linux + + +## 0.6.38 + + - Maintaining compatibility (for v0.6.x series) with node v0.10.x + +## 0.6.37 + + - Solved one part of #276: now now deduce the node ABI from the major version for node >= 2 even when not stored in the abi_crosswalk.json + - Fixed docs to avoid mentioning the deprecated and dangerous `prepublish` in package.json (#291) + - Add new node versions to crosswalk + - Ported tests to use tape instead of mocha + - Got appveyor tests passing by downgrading npm and node-gyp + +## 0.6.36 + + - Removed the running of `testbinary` during install. Because this was regressed for so long, it is too dangerous to re-enable by default. Developers needing validation can call `node-pre-gyp testbinary` directory. + - Fixed regression in v0.6.35 for electron installs (now skipping binary validation which is not yet supported for electron) + +## 0.6.35 + + - No longer recommending `npm ls` in `prepublish` (#291) + - Fixed testbinary command (#283) @szdavid92 + +## 0.6.34 + + - Added new node versions to crosswalk, including v8 + - Upgraded deps to latest versions, started using `^` instead of `~` for all deps. + +## 0.6.33 + + - Improved support for yarn + +## 0.6.32 + + - Honor npm configuration for CA bundles (@heikkipora) + - Add node-pre-gyp and npm versions to user agent (@addaleax) + - Updated various deps + - Add known node version for v7.x + +## 0.6.31 + + - Updated various deps + +## 0.6.30 + + - Update to npmlog@4.x and semver@5.3.x + - Add known node version for v6.5.0 + +## 0.6.29 + + - Add known node versions for v0.10.45, v0.12.14, v4.4.4, v5.11.1, and v6.1.0 + +## 0.6.28 + + - Now more verbose when remote binaries are not available. This is needed since npm is increasingly more quiet by default + and users need to know why builds are falling back to source compiles that might then error out. 
+ +## 0.6.27 + + - Add known node version for node v6 + - Stopped bundling dependencies + - Documented method for module authors to avoid bundling node-pre-gyp + - See https://github.com/mapbox/node-pre-gyp/tree/master#configuring for details + +## 0.6.26 + + - Skip validation for nw runtime (https://github.com/mapbox/node-pre-gyp/pull/181) via @fleg + +## 0.6.25 + + - Improved support for auto-detection of electron runtime in `node-pre-gyp.find()` + - Pull request from @enlight - https://github.com/mapbox/node-pre-gyp/pull/187 + - Add known node version for 4.4.1 and 5.9.1 + +## 0.6.24 + + - Add known node version for 5.8.0, 5.9.0, and 4.4.0. + +## 0.6.23 + + - Add known node version for 0.10.43, 0.12.11, 4.3.2, and 5.7.1. + +## 0.6.22 + + - Add known node version for 4.3.1, and 5.7.0. + +## 0.6.21 + + - Add known node version for 0.10.42, 0.12.10, 4.3.0, and 5.6.0. + +## 0.6.20 + + - Add known node version for 4.2.5, 4.2.6, 5.4.0, 5.4.1,and 5.5.0. + +## 0.6.19 + + - Add known node version for 4.2.4 + +## 0.6.18 + + - Add new known node versions for 0.10.x, 0.12.x, 4.x, and 5.x + +## 0.6.17 + + - Re-tagged to fix packaging problem of `Error: Cannot find module 'isarray'` + +## 0.6.16 + + - Added known version in crosswalk for 5.1.0. + +## 0.6.15 + + - Upgraded tar-pack (https://github.com/mapbox/node-pre-gyp/issues/182) + - Support custom binary hosting mirror (https://github.com/mapbox/node-pre-gyp/pull/170) + - Added known version in crosswalk for 4.2.2. + +## 0.6.14 + + - Added node 5.x version + +## 0.6.13 + + - Added more known node 4.x versions + +## 0.6.12 + + - Added support for [Electron](http://electron.atom.io/). Just pass the `--runtime=electron` flag when building/installing. Thanks @zcbenz + +## 0.6.11 + + - Added known node and io.js versions including more 3.x and 4.x versions + +## 0.6.10 + + - Added known node and io.js versions including 3.x and 4.x versions + - Upgraded `tar` dep + +## 0.6.9 + + - Upgraded `rc` dep + - Updated known io.js version: v2.4.0 + +## 0.6.8 + + - Upgraded `semver` and `rimraf` deps + - Updated known node and io.js versions + +## 0.6.7 + + - Fixed `node_abi` versions for io.js 1.1.x -> 1.8.x (should be 43, but was stored as 42) (refs https://github.com/iojs/build/issues/94) + +## 0.6.6 + + - Updated with known io.js 2.0.0 version + +## 0.6.5 + + - Now respecting `npm_config_node_gyp` (https://github.com/npm/npm/pull/4887) + - Updated to semver@4.3.2 + - Updated known node v0.12.x versions and io.js 1.x versions. + +## 0.6.4 + + - Improved support for `io.js` (@fengmk2) + - Test coverage improvements (@mikemorris) + - Fixed support for `--dist-url` that regressed in 0.6.3 + +## 0.6.3 + + - Added support for passing raw options to node-gyp using `--` separator. Flags passed after + the `--` to `node-pre-gyp configure` will be passed directly to gyp while flags passed + after the `--` will be passed directly to make/visual studio. + - Added `node-pre-gyp configure` command to be able to call `node-gyp configure` directly + - Fix issue with require validation not working on windows 7 (@edgarsilva) + +## 0.6.2 + + - Support for io.js >= v1.0.2 + - Deferred require of `request` and `tar` to help speed up command line usage of `node-pre-gyp`. + +## 0.6.1 + + - Fixed bundled `tar` version + +## 0.6.0 + + - BREAKING: node odd releases like v0.11.x now use `major.minor.patch` for `{node_abi}` instead of `NODE_MODULE_VERSION` (#124) + - Added support for `toolset` option in versioning. 
By default is an empty string but `--toolset` can be passed to publish or install to select alternative binaries that target a custom toolset like C++11. For example to target Visual Studio 2014 modules like node-sqlite3 use `--toolset=v140`. + - Added support for `--no-rollback` option to request that a failed binary test does not remove the binary module leaves it in place. + - Added support for `--update-binary` option to request an existing binary be re-installed and the check for a valid local module be skipped. + - Added support for passing build options from `npm` through `node-pre-gyp` to `node-gyp`: `--nodedir`, `--disturl`, `--python`, and `--msvs_version` + +## 0.5.31 + + - Added support for deducing node_abi for node.js runtime from previous release if the series is even + - Added support for --target=0.10.33 + +## 0.5.30 + + - Repackaged with latest bundled deps + +## 0.5.29 + + - Added support for semver `build`. + - Fixed support for downloading from urls that include `+`. + +## 0.5.28 + + - Now reporting unix style paths only in reveal command + +## 0.5.27 + + - Fixed support for auto-detecting s3 bucket name when it contains `.` - @taavo + - Fixed support for installing when path contains a `'` - @halfdan + - Ported tests to mocha + +## 0.5.26 + + - Fix node-webkit support when `--target` option is not provided + +## 0.5.25 + + - Fix bundling of deps + +## 0.5.24 + + - Updated ABI crosswalk to incldue node v0.10.30 and v0.10.31 + +## 0.5.23 + + - Added `reveal` command. Pass no options to get all versioning data as json. Pass a second arg to grab a single versioned property value + - Added support for `--silent` (shortcut for `--loglevel=silent`) + +## 0.5.22 + + - Fixed node-webkit versioning name (NOTE: node-webkit support still experimental) + +## 0.5.21 + + - New package to fix `shasum check failed` error with v0.5.20 + +## 0.5.20 + + - Now versioning node-webkit binaries based on major.minor.patch - assuming no compatible ABI across versions (#90) + +## 0.5.19 + + - Updated to know about more node-webkit releases + +## 0.5.18 + + - Updated to know about more node-webkit releases + +## 0.5.17 + + - Updated to know about node v0.10.29 release + +## 0.5.16 + + - Now supporting all aws-sdk configuration parameters (http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html) (#86) + +## 0.5.15 + + - Fixed installation of windows packages sub directories on unix systems (#84) + +## 0.5.14 + + - Finished support for cross building using `--target_platform` option (#82) + - Now skipping binary validation on install if target arch/platform do not match the host. + - Removed multi-arch validing for OS X since it required a FAT node.js binary + +## 0.5.13 + + - Fix problem in 0.5.12 whereby the wrong versions of mkdirp and semver where bundled. 
+ +## 0.5.12 + + - Improved support for node-webkit (@Mithgol) + +## 0.5.11 + + - Updated target versions listing + +## 0.5.10 + + - Fixed handling of `-debug` flag passed directory to node-pre-gyp (#72) + - Added optional second arg to `node_pre_gyp.find` to customize the default versioning options used to locate the runtime binary + - Failed install due to `testbinary` check failure no longer leaves behind binary (#70) + +## 0.5.9 + + - Fixed regression in `testbinary` command causing installs to fail on windows with 0.5.7 (#60) + +## 0.5.8 + + - Started bundling deps + +## 0.5.7 + + - Fixed the `testbinary` check, which is used to determine whether to re-download or source compile, to work even in complex dependency situations (#63) + - Exposed the internal `testbinary` command in node-pre-gyp command line tool + - Fixed minor bug so that `fallback_to_build` option is always respected + +## 0.5.6 + + - Added support for versioning on the `name` value in `package.json` (#57). + - Moved to using streams for reading tarball when publishing (#52) + +## 0.5.5 + + - Improved binary validation that also now works with node-webkit (@Mithgol) + - Upgraded test apps to work with node v0.11.x + - Improved test coverage + +## 0.5.4 + + - No longer depends on external install of node-gyp for compiling builds. + +## 0.5.3 + + - Reverted fix for debian/nodejs since it broke windows (#45) + +## 0.5.2 + + - Support for debian systems where the node binary is named `nodejs` (#45) + - Added `bin/node-pre-gyp.cmd` to be able to run command on windows locally (npm creates an .npm automatically when globally installed) + - Updated abi-crosswalk with node v0.10.26 entry. + +## 0.5.1 + + - Various minor bug fixes, several improving windows support for publishing. + +## 0.5.0 + + - Changed property names in `binary` object: now required are `module_name`, `module_path`, and `host`. + - Now `module_path` supports versioning, which allows developers to opt-in to using a versioned install path (#18). + - Added `remote_path` which also supports versioning. + - Changed `remote_uri` to `host`. + +## 0.4.2 + + - Added support for `--target` flag to request cross-compile against a specific node/node-webkit version. + - Added preliminary support for node-webkit + - Fixed support for `--target_arch` option being respected in all cases. + +## 0.4.1 + + - Fixed exception when only stderr is available in binary test (@bendi / #31) + +## 0.4.0 + + - Enforce only `https:` based remote publishing access. + - Added `node-pre-gyp info` command to display listing of published binaries + - Added support for changing the directory node-pre-gyp should build in with the `-C/--directory` option. + - Added support for S3 prefixes. + +## 0.3.1 + + - Added `unpublish` command. + - Fixed module path construction in tests. + - Added ability to disable falling back to build behavior via `npm install --fallback-to-build=false` which overrides setting in a depedencies package.json `install` target. + +## 0.3.0 + + - Support for packaging all files in `module_path` directory - see `app4` for example + - Added `testpackage` command. + - Changed `clean` command to only delete `.node` not entire `build` directory since node-gyp will handle that. + - `.node` modules must be in a folder of there own since tar-pack will remove everything when it unpacks. 
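Circling back to the `node-modules-regexp` readme a few hunks above: the exported pattern can be used as a simple filter, for example to keep tooling away from vendored code. A small hedged sketch follows; the file paths are invented for illustration.

```js
// The regexp matches any path that contains a node_modules segment.
var nodeModulesRegExp = require('node-modules-regexp');

var files = [
  '/app/src/server.js',
  '/app/node_modules/negotiator/lib/mediaType.js'
];

// Keep only first-party files, e.g. when deciding what to transpile or watch.
var ownCode = files.filter(function (file) {
  return !nodeModulesRegExp.test(file);
});

console.log(ownCode); // -> ['/app/src/server.js']
```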
diff --git a/node_modules/node-pre-gyp/LICENSE b/node_modules/node-pre-gyp/LICENSE new file mode 100644 index 00000000..8f5fce91 --- /dev/null +++ b/node_modules/node-pre-gyp/LICENSE @@ -0,0 +1,27 @@ +Copyright (c), Mapbox + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of node-pre-gyp nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/node-pre-gyp/README.md b/node_modules/node-pre-gyp/README.md new file mode 100644 index 00000000..088f2af6 --- /dev/null +++ b/node_modules/node-pre-gyp/README.md @@ -0,0 +1,693 @@ +# node-pre-gyp + +#### node-pre-gyp makes it easy to publish and install Node.js C++ addons from binaries + +[![NPM](https://nodei.co/npm/node-pre-gyp.png?downloads=true&downloadRank=true)](https://nodei.co/npm/node-pre-gyp/) + +[![Build Status](https://api.travis-ci.org/mapbox/node-pre-gyp.svg)](https://travis-ci.org/mapbox/node-pre-gyp) +[![Build status](https://ci.appveyor.com/api/projects/status/3nxewb425y83c0gv)](https://ci.appveyor.com/project/Mapbox/node-pre-gyp) +[![Dependencies](https://david-dm.org/mapbox/node-pre-gyp.svg)](https://david-dm.org/mapbox/node-pre-gyp) + +`node-pre-gyp` stands between [npm](https://github.com/npm/npm) and [node-gyp](https://github.com/Tootallnate/node-gyp) and offers a cross-platform method of binary deployment. + +### Features + + - A command line tool called `node-pre-gyp` that can install your package's C++ module from a binary. + - A variety of developer targeted commands for packaging, testing, and publishing binaries. + - A JavaScript module that can dynamically require your installed binary: `require('node-pre-gyp').find` + +For a hello world example of a module packaged with `node-pre-gyp` see and [the wiki ](https://github.com/mapbox/node-pre-gyp/wiki/Modules-using-node-pre-gyp) for real world examples. + +## Credits + + - The module is modeled after [node-gyp](https://github.com/Tootallnate/node-gyp) by [@Tootallnate](https://github.com/Tootallnate) + - Motivation for initial development came from [@ErisDS](https://github.com/ErisDS) and the [Ghost Project](https://github.com/TryGhost/Ghost). 
+ - Development is sponsored by [Mapbox](https://www.mapbox.com/) + +## FAQ + +See the [Frequently Ask Questions](https://github.com/mapbox/node-pre-gyp/wiki/FAQ). + +## Depends + + - Node.js >= node v6.x + +## Install + +`node-pre-gyp` is designed to be installed as a local dependency of your Node.js C++ addon and accessed like: + + ./node_modules/.bin/node-pre-gyp --help + +But you can also install it globally: + + npm install node-pre-gyp -g + +## Usage + +### Commands + +View all possible commands: + + node-pre-gyp --help + +- clean - Remove the entire folder containing the compiled .node module +- install - Install pre-built binary for module +- reinstall - Run "clean" and "install" at once +- build - Compile the module by dispatching to node-gyp or nw-gyp +- rebuild - Run "clean" and "build" at once +- package - Pack binary into tarball +- testpackage - Test that the staged package is valid +- publish - Publish pre-built binary +- unpublish - Unpublish pre-built binary +- info - Fetch info on published binaries + +You can also chain commands: + + node-pre-gyp clean build unpublish publish info + +### Options + +Options include: + + - `-C/--directory`: run the command in this directory + - `--build-from-source`: build from source instead of using pre-built binary + - `--update-binary`: reinstall by replacing previously installed local binary with remote binary + - `--runtime=node-webkit`: customize the runtime: `node`, `electron` and `node-webkit` are the valid options + - `--fallback-to-build`: fallback to building from source if pre-built binary is not available + - `--target=0.4.0`: Pass the target node or node-webkit version to compile against + - `--target_arch=ia32`: Pass the target arch and override the host `arch`. Valid values are 'ia32','x64', or `arm`. + - `--target_platform=win32`: Pass the target platform and override the host `platform`. Valid values are `linux`, `darwin`, `win32`, `sunos`, `freebsd`, `openbsd`, and `aix`. + +Both `--build-from-source` and `--fallback-to-build` can be passed alone or they can provide values. You can pass `--fallback-to-build=false` to override the option as declared in package.json. In addition to being able to pass `--build-from-source` you can also pass `--build-from-source=myapp` where `myapp` is the name of your module. + +For example: `npm install --build-from-source=myapp`. This is useful if: + + - `myapp` is referenced in the package.json of a larger app and therefore `myapp` is being installed as a dependency with `npm install`. + - The larger app also depends on other modules installed with `node-pre-gyp` + - You only want to trigger a source compile for `myapp` and the other modules. + +### Configuring + +This is a guide to configuring your module to use node-pre-gyp. + +#### 1) Add new entries to your `package.json` + + - Add `node-pre-gyp` to `dependencies` + - Add `aws-sdk` as a `devDependency` + - Add a custom `install` script + - Declare a `binary` object + +This looks like: + +```js + "dependencies" : { + "node-pre-gyp": "0.6.x" + }, + "devDependencies": { + "aws-sdk": "2.x" + } + "scripts": { + "install": "node-pre-gyp install --fallback-to-build" + }, + "binary": { + "module_name": "your_module", + "module_path": "./lib/binding/", + "host": "https://your_module.s3-us-west-1.amazonaws.com" + } +``` + +For a full example see [node-addon-examples's package.json](https://github.com/springmeyer/node-addon-example/blob/master/package.json). 
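Pulling those fragments together, a complete `package.json` for a hypothetical addon might look like the sketch below; every concrete value (name, version, `main` path, bucket) is illustrative rather than prescriptive.

```js
{
  "name": "your_module",
  "version": "0.1.0",
  "main": "./lib/index.js",
  "dependencies": {
    "node-pre-gyp": "0.6.x"
  },
  "devDependencies": {
    "aws-sdk": "2.x"
  },
  "scripts": {
    "install": "node-pre-gyp install --fallback-to-build"
  },
  "binary": {
    "module_name": "your_module",
    "module_path": "./lib/binding/",
    "host": "https://your_module.s3-us-west-1.amazonaws.com"
  }
}
```

With this in place, `npm install` runs the custom `install` script: node-pre-gyp first tries to fetch a published tarball from `host`, and only because of `--fallback-to-build` does it dispatch to `node-gyp` for a source compile when no binary is available.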
+ +Let's break this down: + + - Dependencies need to list `node-pre-gyp` + - Your devDependencies should list `aws-sdk` so that you can run `node-pre-gyp publish` locally or a CI system. We recommend using `devDependencies` only since `aws-sdk` is large and not needed for `node-pre-gyp install` since it only uses http to fetch binaries + - Your `scripts` section should override the `install` target with `"install": "node-pre-gyp install --fallback-to-build"`. This allows node-pre-gyp to be used instead of the default npm behavior of always source compiling with `node-gyp` directly. + - Your package.json should contain a `binary` section describing key properties you provide to allow node-pre-gyp to package optimally. They are detailed below. + +Note: in the past we recommended putting `node-pre-gyp` in the `bundledDependencies`, but we no longer recommend this. In the past there were npm bugs (with node versions 0.10.x) that could lead to node-pre-gyp not being available at the right time during install (unless we bundled). This should no longer be the case. Also, for a time we recommended using `"preinstall": "npm install node-pre-gyp"` as an alternative method to avoid needing to bundle. But this did not behave predictably across all npm versions - see https://github.com/mapbox/node-pre-gyp/issues/260 for the details. So we do not recommend using `preinstall` to install `node-pre-gyp`. More history on this at https://github.com/strongloop/fsevents/issues/157#issuecomment-265545908. + +##### The `binary` object has three required properties + +###### module_name + +The name of your native node module. This value must: + + - Match the name passed to [the NODE_MODULE macro](http://nodejs.org/api/addons.html#addons_hello_world) + - Must be a valid C variable name (e.g. it cannot contain `-`) + - Should not include the `.node` extension. + +###### module_path + +The location your native module is placed after a build. This should be an empty directory without other Javascript files. This entire directory will be packaged in the binary tarball. When installing from a remote package this directory will be overwritten with the contents of the tarball. + +Note: This property supports variables based on [Versioning](#versioning). + +###### host + +A url to the remote location where you've published tarball binaries (must be `https` not `http`). + +It is highly recommended that you use Amazon S3. The reasons are: + + - Various node-pre-gyp commands like `publish` and `info` only work with an S3 host. + - S3 is a very solid hosting platform for distributing large files. + - We provide detail documentation for using [S3 hosting](#s3-hosting) with node-pre-gyp. + +Why then not require S3? Because while some applications using node-pre-gyp need to distribute binaries as large as 20-30 MB, others might have very small binaries and might wish to store them in a GitHub repo. This is not recommended, but if an author really wants to host in a non-S3 location then it should be possible. + +It should also be mentioned that there is an optional and entirely separate npm module called [node-pre-gyp-github](https://github.com/bchr02/node-pre-gyp-github) which is intended to complement node-pre-gyp and be installed along with it. It provides the ability to store and publish your binaries within your repositories GitHub Releases if you would rather not use S3 directly. 
Installation and usage instructions can be found [here](https://github.com/bchr02/node-pre-gyp-github), but the basic premise is that instead of using the ```node-pre-gyp publish``` command you would use ```node-pre-gyp-github publish```. + +##### The `binary` object has two optional properties + +###### remote_path + +It **is recommended** that you customize this property. This is an extra path to use for publishing and finding remote tarballs. The default value for `remote_path` is `""` meaning that if you do not provide it then all packages will be published at the base of the `host`. It is recommended to provide a value like `./{name}/v{version}` to help organize remote packages in the case that you choose to publish multiple node addons to the same `host`. + +Note: This property supports variables based on [Versioning](#versioning). + +###### package_name + +It is **not recommended** to override this property unless you are also overriding the `remote_path`. This is the versioned name of the remote tarball containing the binary `.node` module and any supporting files you've placed inside the `module_path` directory. Unless you specify `package_name` in your `package.json` then it defaults to `{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz` which allows your binary to work across node versions, platforms, and architectures. If you are using `remote_path` that is also versioned by `./{module_name}/v{version}` then you could remove these variables from the `package_name` and just use: `{node_abi}-{platform}-{arch}.tar.gz`. Then your remote tarball will be looked up at, for example, `https://example.com/your-module/v0.1.0/node-v11-linux-x64.tar.gz`. + +Avoiding the version of your module in the `package_name` and instead only embedding in a directory name can be useful when you want to make a quick tag of your module that does not change any C++ code. In this case you can just copy binaries to the new version behind the scenes like: + +```sh +aws s3 sync --acl public-read s3://mapbox-node-binary/sqlite3/v3.0.3/ s3://mapbox-node-binary/sqlite3/v3.0.4/ +``` + +Note: This property supports variables based on [Versioning](#versioning). + +#### 2) Add a new target to binding.gyp + +`node-pre-gyp` calls out to `node-gyp` to compile the module and passes variables along like [module_name](#module_name) and [module_path](#module_path). + +A new target must be added to `binding.gyp` that moves the compiled `.node` module from `./build/Release/module_name.node` into the directory specified by `module_path`. + +Add a target like this at the end of your `targets` list: + +```js + { + "target_name": "action_after_build", + "type": "none", + "dependencies": [ "<(module_name)" ], + "copies": [ + { + "files": [ "<(PRODUCT_DIR)/<(module_name).node" ], + "destination": "<(module_path)" + } + ] + } +``` + +For a full example see [node-addon-example's binding.gyp](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/binding.gyp). 
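To show where that copy step sits, here is a hedged sketch of a whole `binding.gyp` with both targets. The main target's name comes from the `module_name` variable node-pre-gyp passes in (which is why `action_after_build` can depend on `<(module_name)`), while the `sources` entry is an invented placeholder.

```js
{
  "targets": [
    {
      "target_name": "<(module_name)",
      "sources": [ "src/your_module.cc" ]
    },
    {
      "target_name": "action_after_build",
      "type": "none",
      "dependencies": [ "<(module_name)" ],
      "copies": [
        {
          "files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
          "destination": "<(module_path)"
        }
      ]
    }
  ]
}
```

Because `action_after_build` depends on the main target, the copy only runs after a successful compile.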
+ +#### 3) Dynamically require your `.node` + +Inside the main js file that requires your addon module you are likely currently doing: + +```js +var binding = require('../build/Release/binding.node'); +``` + +or: + +```js +var bindings = require('./bindings') +``` + +Change those lines to: + +```js +var binary = require('node-pre-gyp'); +var path = require('path'); +var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json'))); +var binding = require(binding_path); +``` + +For a full example see [node-addon-example's index.js](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/index.js#L1-L4) + +#### 4) Build and package your app + +Now build your module from source: + + npm install --build-from-source + +The `--build-from-source` tells `node-pre-gyp` to not look for a remote package and instead dispatch to node-gyp to build. + +Now `node-pre-gyp` should now also be installed as a local dependency so the command line tool it offers can be found at `./node_modules/.bin/node-pre-gyp`. + +#### 5) Test + +Now `npm test` should work just as it did before. + +#### 6) Publish the tarball + +Then package your app: + + ./node_modules/.bin/node-pre-gyp package + +Once packaged, now you can publish: + + ./node_modules/.bin/node-pre-gyp publish + +Currently the `publish` command pushes your binary to S3. This requires: + + - You have installed `aws-sdk` with `npm install aws-sdk` + - You have created a bucket already. + - The `host` points to an S3 http or https endpoint. + - You have configured node-pre-gyp to read your S3 credentials (see [S3 hosting](#s3-hosting) for details). + +You can also host your binaries elsewhere. To do this requires: + + - You manually publish the binary created by the `package` command to an `https` endpoint + - Ensure that the `host` value points to your custom `https` endpoint. + +#### 7) Automate builds + +Now you need to publish builds for all the platforms and node versions you wish to support. This is best automated. + + - See [Appveyor Automation](#appveyor-automation) for how to auto-publish builds on Windows. + - See [Travis Automation](#travis-automation) for how to auto-publish builds on OS X and Linux. + +#### 8) You're done! + +Now publish your module to the npm registry. Users will now be able to install your module from a binary. + +What will happen is this: + +1. `npm install ` will pull from the npm registry +2. npm will run the `install` script which will call out to `node-pre-gyp` +3. `node-pre-gyp` will fetch the binary `.node` module and unpack in the right place +4. Assuming that all worked, you are done + +If a a binary was not available for a given platform and `--fallback-to-build` was used then `node-gyp rebuild` will be called to try to source compile the module. + +## N-API Considerations + +[N-API](https://nodejs.org/api/n-api.html#n_api_n_api) is an ABI-stable alternative to previous technologies such as [nan](https://github.com/nodejs/nan) which are tied to a specific Node runtime engine. N-API is Node runtime engine agnostic and guarantees modules created today will continue to run, without changes, into the future. + +Using `node-pre-gyp` with N-API projects requires a handful of additional configuration values and imposes some additional requirements. + +The most significant difference is that an N-API module can be coded to target multiple N-API versions. 
Therefore, an N-API module must declare in its `package.json` file which N-API versions the module is designed to run against. In addition, since multiple builds may be required for a single module, path and file names must be specified in a way that avoids naming conflicts. + +### The `napi_versions` array property + +An N-API module must declare in its `package.json` file the N-API versions the module is intended to support. This is accomplished by including a `napi_versions` array property in the `binary` object. For example: + +```js +"binary": { + "module_name": "your_module", + "module_path": "your_module_path", + "host": "https://your_bucket.s3-us-west-1.amazonaws.com", + "napi_versions": [1,3] + } +``` + +If the `napi_versions` array property is *not* present, `node-pre-gyp` operates as it always has. Including the `napi_versions` array property instructs `node-pre-gyp` that this is an N-API module build. + +When the `napi_versions` array property is present, `node-pre-gyp` fires off multiple operations, one for each of the N-API versions in the array. In the example above, two operations are initiated, one for N-API version 1 and a second for N-API version 3. How this version number is communicated is described next. + +### The `napi_build_version` value + +For each of the N-API module operations `node-pre-gyp` initiates, it ensures that the `napi_build_version` is set appropriately. + +This value is of importance in two areas: + +1. The C/C++ code which needs to know against which N-API version it should compile. +2. `node-pre-gyp` itself which must assign appropriate path and file names to avoid collisions. + +### Defining `NAPI_VERSION` for the C/C++ code + +The `napi_build_version` value is communicated to the C/C++ code by adding this code to the `binding.gyp` file: + +``` +"defines": [ + "NAPI_VERSION=<(napi_build_version)", +] +``` + +This ensures that `NAPI_VERSION`, an integer value, is declared appropriately to the C/C++ code for each build. + +> Note that earlier versions of this document recommended defining the symbol `NAPI_BUILD_VERSION`. `NAPI_VERSION` is preferred because it is used by the N-API C/C++ headers to configure the specific N-API version being requested. + +### Path and file naming requirements in `package.json` + +Since `node-pre-gyp` fires off multiple operations for each request, it is essential that path and file names be created in such a way as to avoid collisions. This is accomplished by imposing additional path and file naming requirements. + +Specifically, when performing N-API builds, the `{napi_build_version}` text configuration value *must* be present in the `module_path` property. In addition, the `{napi_build_version}` text configuration value *must* be present in either the `remote_path` or `package_name` property. (No problem if it's in both.) + +Here's an example: + +```js +"binary": { + "module_name": "your_module", + "module_path": "./lib/binding/napi-v{napi_build_version}", + "remote_path": "./{module_name}/v{version}/{configuration}/", + "package_name": "{platform}-{arch}-napi-v{napi_build_version}.tar.gz", + "host": "https://your_bucket.s3-us-west-1.amazonaws.com", + "napi_versions": [1,3] + } +``` + +## Supporting both N-API and NAN builds + +You may have a legacy native add-on that you wish to continue supporting for those versions of Node that do not support N-API, as you add N-API support for later Node versions.
This can be accomplished by specifying the `node_napi_label` configuration value in the package.json `binary.package_name` property. + +Placing the configuration value `node_napi_label` in the package.json `binary.package_name` property instructs `node-pre-gyp` to build all viable N-API binaries supported by the current Node instance. If the current Node instance does not support N-API, `node-pre-gyp` will request a traditional, non-N-API build. + +The configuration value `node_napi_label` is set by `node-pre-gyp` to the type of build created, `napi` or `node`, and the version number. For N-API builds, the string contains the N-API version and has values like `napi-v3`. For traditional, non-N-API builds, the string contains the ABI version with values like `node-v46`. + +Here's how the `binary` configuration above might be changed to support both N-API and NAN builds: + +```js +"binary": { + "module_name": "your_module", + "module_path": "./lib/binding/{node_napi_label}", + "remote_path": "./{module_name}/v{version}/{configuration}/", + "package_name": "{platform}-{arch}-{node_napi_label}.tar.gz", + "host": "https://your_bucket.s3-us-west-1.amazonaws.com", + "napi_versions": [1,3] + } +``` + +The C/C++ symbol `NAPI_VERSION` can be used to distinguish N-API and non-N-API builds. The value of `NAPI_VERSION` is set to the integer N-API version for N-API builds and is set to `0` for non-N-API builds. + +For example: + +```C +#if NAPI_VERSION +// N-API code goes here +#else +// NAN code goes here +#endif +``` + +### Two additional configuration values + +The following two configuration values, which were implemented in previous versions of `node-pre-gyp`, continue to exist, but have been replaced by the `node_napi_label` configuration value described above. + +1. `napi_version` If N-API is supported by the currently executing Node instance, this value is the N-API version number supported by Node. If N-API is not supported, this value is an empty string. + +2. `node_abi_napi` If the value returned for `napi_version` is non-empty, this value is `'napi'`. If the value returned for `napi_version` is empty, this value is the value returned for `node_abi`. + +These values are present for use in the `binding.gyp` file and may be used as `{napi_version}` and `{node_abi_napi}` for text substitution in the `binary` properties of the `package.json` file. + +## S3 Hosting + +You can host wherever you choose but S3 is cheap, `node-pre-gyp publish` expects it, and S3 can be integrated well with [Travis.ci](http://travis-ci.org) to automate builds for OS X and Ubuntu, and with [Appveyor](http://appveyor.com) to automate builds for Windows. Here is an approach to do this: + +First, get set up locally and test the workflow: + +#### 1) Create an S3 bucket + +And have your **key** and **secret key** ready for writing to the bucket. + +It is recommended to create an IAM user with a policy that only gives permissions to the specific bucket you plan to publish to. This can be done in the [IAM console](https://console.aws.amazon.com/iam/) by: 1) adding a new user, 2) choosing `Attach User Policy`, 3) Using the `Policy Generator`, 4) selecting `Amazon S3` for the service, 5) adding the actions: `DeleteObject`, `GetObject`, `GetObjectAcl`, `ListBucket`, `PutObject`, `PutObjectAcl`, 6) adding an ARN of `arn:aws:s3:::bucket/*` (replacing `bucket` with your bucket name), and finally 7) clicking `Add Statement` and saving the policy.
It should generate a policy like: + +```js +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "Stmt1394587197000", + "Effect": "Allow", + "Action": [ + "s3:DeleteObject", + "s3:GetObject", + "s3:GetObjectAcl", + "s3:ListBucket", + "s3:PutObject", + "s3:PutObjectAcl" + ], + "Resource": [ + "arn:aws:s3:::node-pre-gyp-tests/*" + ] + } + ] +} +``` + +#### 2) Install node-pre-gyp + +Either install it globally: + + npm install node-pre-gyp -g + +Or put the local version on your PATH + + export PATH=`pwd`/node_modules/.bin/:$PATH + +#### 3) Configure AWS credentials + +There are several ways to do this. + +You can use any of the methods described at http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html. + +Or you can create a `~/.node_pre_gyprc` + +Or pass options in any way supported by [RC](https://github.com/dominictarr/rc#standards) + +A `~/.node_pre_gyprc` looks like: + +```js +{ + "accessKeyId": "xxx", + "secretAccessKey": "xxx" +} +``` + +Another way is to use your environment: + + export node_pre_gyp_accessKeyId=xxx + export node_pre_gyp_secretAccessKey=xxx + +You may also need to specify the `region` if it is not explicit in the `host` value you use. The `bucket` can also be specified but it is optional because `node-pre-gyp` will detect it from the `host` value. + +#### 4) Package and publish your build + +Install the `aws-sdk`: + + npm install aws-sdk + +Then publish: + + node-pre-gyp package publish + +Note: if you hit an error like `Hostname/IP doesn't match certificate's altnames` it may mean that you need to provide the `region` option in your config. + +## Appveyor Automation + +[Appveyor](http://www.appveyor.com/) can build binaries and publish the results per commit and supports: + + - Windows Visual Studio 2013 and related compilers + - Both 64 bit (x64) and 32 bit (x86) build configurations + - Multiple Node.js versions + +For an example of doing this see [node-sqlite3's appveyor.yml](https://github.com/mapbox/node-sqlite3/blob/master/appveyor.yml). + +Below is a guide to getting set up: + +#### 1) Create a free Appveyor account + +Go to https://ci.appveyor.com/signup/free and sign in with your GitHub account. + +#### 2) Create a new project + +Go to https://ci.appveyor.com/projects/new and select the GitHub repo for your module + +#### 3) Add appveyor.yml and push it + +Once you have committed an `appveyor.yml` ([appveyor.yml reference](http://www.appveyor.com/docs/appveyor-yml)) to your GitHub repo and pushed it AppVeyor should automatically start building your project. + +#### 4) Create secure variables + +Encrypt your S3 AWS keys by going to and hitting the `encrypt` button. + +Then paste the result into your `appveyor.yml` + +```yml +environment: + node_pre_gyp_accessKeyId: + secure: Dn9HKdLNYvDgPdQOzRq/DqZ/MPhjknRHB1o+/lVU8MA= + node_pre_gyp_secretAccessKey: + secure: W1rwNoSnOku1r+28gnoufO8UA8iWADmL1LiiwH9IOkIVhDTNGdGPJqAlLjNqwLnL +``` + +NOTE: keys are per account but not per repo (this is difference than Travis where keys are per repo but not related to the account used to encrypt them). + +#### 5) Hook up publishing + +Just put `node-pre-gyp package publish` in your `appveyor.yml` after `npm install`. + +#### 6) Publish when you want + +You might wish to publish binaries only on a specific commit. 
To do this you could borrow from the [Travis CI idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`: + + SET CM=%APPVEYOR_REPO_COMMIT_MESSAGE% + if not "%CM%" == "%CM:[publish binary]=%" node-pre-gyp --msvs_version=2013 publish + +If your commit message contains special characters (e.g. `&`) this method might fail. An alternative is to use PowerShell, which gives you additional possibilities, like ignoring case by using `ToLower()`: + + ps: if($env:APPVEYOR_REPO_COMMIT_MESSAGE.ToLower().Contains('[publish binary]')) { node-pre-gyp --msvs_version=2013 publish } + +Remember this publishing is not the same as `npm publish`. We're just talking about the binary module here and not your entire npm package. + +## Travis Automation + +[Travis](https://travis-ci.org/) can push to S3 after a successful build and supports both: + + - Ubuntu Precise and OS X (64 bit) + - Multiple Node.js versions + +For an example of doing this see [node-add-example's .travis.yml](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/.travis.yml). + +Note: if you need 32 bit binaries, this can be done from a 64 bit Travis machine. See [the node-sqlite3 scripts for an example of doing this](https://github.com/mapbox/node-sqlite3/blob/bae122aa6a2b8a45f6b717fab24e207740e32b5d/scripts/build_against_node.sh#L54-L74). + +Below is a guide to getting set up: + +#### 1) Install the Travis gem + + gem install travis + +#### 2) Create secure variables + +Make sure you run this command from within the directory of your module. + +Use `travis-encrypt` like: + + travis encrypt node_pre_gyp_accessKeyId=${node_pre_gyp_accessKeyId} + travis encrypt node_pre_gyp_secretAccessKey=${node_pre_gyp_secretAccessKey} + +Then put those values in your `.travis.yml` like: + +```yaml +env: + global: + - secure: F+sEL/v56CzHqmCSSES4pEyC9NeQlkoR0Gs/ZuZxX1ytrj8SKtp3MKqBj7zhIclSdXBz4Ev966Da5ctmcTd410p0b240MV6BVOkLUtkjZJyErMBOkeb8n8yVfSoeMx8RiIhBmIvEn+rlQq+bSFis61/JkE9rxsjkGRZi14hHr4M= + - secure: o2nkUQIiABD139XS6L8pxq3XO5gch27hvm/gOdV+dzNKc/s2KomVPWcOyXNxtJGhtecAkABzaW8KHDDi5QL1kNEFx6BxFVMLO8rjFPsMVaBG9Ks6JiDQkkmrGNcnVdxI/6EKTLHTH5WLsz8+J7caDBzvKbEfTux5EamEhxIWgrI= +``` + +More details on Travis encryption at http://about.travis-ci.org/docs/user/encryption-keys/. + +#### 3) Hook up publishing + +Just put `node-pre-gyp package publish` in your `.travis.yml` after `npm install`. + +##### OS X publishing + +If you want binaries for OS X in addition to linux you can enable [multi-os for Travis](http://docs.travis-ci.com/user/multi-os/#Setting-.travis.yml) + +Use a configuration like: + +```yml + +language: cpp + +os: +- linux +- osx + +env: + matrix: + - NODE_VERSION="4" + - NODE_VERSION="6" + +before_install: +- rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm +- source ~/.nvm/nvm.sh +- nvm install $NODE_VERSION +- nvm use $NODE_VERSION +``` + +See [Travis OS X Gotchas](#travis-os-x-gotchas) for why we replace `language: node_js` and `node_js:` sections with `language: cpp` and a custom matrix. + +Also create platform specific sections for any deps that need install. 
+
+```yml
+- if [ $(uname -s) == 'Linux' ]; then apt-get install libpng-dev; fi;
+- if [ $(uname -s) == 'Darwin' ]; then brew install libpng; fi;
+```
+
+For detailed multi-OS examples see [node-mapnik](https://github.com/mapnik/node-mapnik/blob/master/.travis.yml) and [node-sqlite3](https://github.com/mapbox/node-sqlite3/blob/master/.travis.yml).
+
+##### Travis OS X Gotchas
+
+First, unlike the Travis Linux machines, the OS X machines do not put `node-pre-gyp` on PATH by default. To do so you will need to:
+
+```sh
+export PATH=$(pwd)/node_modules/.bin:${PATH}
+```
+
+Second, the OS X machines do not support using a matrix for installing different Node.js versions. So you need to bootstrap the installation of Node.js in a cross-platform way.
+
+By doing:
+
+```yml
+env:
+  matrix:
+    - NODE_VERSION="4"
+    - NODE_VERSION="6"
+
+before_install:
+  - rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm
+  - source ~/.nvm/nvm.sh
+  - nvm install $NODE_VERSION
+  - nvm use $NODE_VERSION
+```
+
+You can easily recreate the previous behavior of this matrix:
+
+```yml
+node_js:
+  - "4"
+  - "6"
+```
+
+#### 4) Publish when you want
+
+You might wish to publish binaries only on a specific commit. To do this you could borrow from the [Travis CI idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`:
+
+    COMMIT_MESSAGE=$(git log --format=%B --no-merges -n 1 | tr -d '\n')
+    if [[ ${COMMIT_MESSAGE} =~ "[publish binary]" ]]; then node-pre-gyp publish; fi;
+
+Then you can trigger new binaries to be built like:
+
+    git commit -a -m "[publish binary]"
+
+Or, if you don't have any changes to make, simply run:
+
+    git commit --allow-empty -m "[publish binary]"
+
+WARNING: if you are working in a pull request and publishing binaries from there, then you will want to avoid double publishing when Travis CI builds both the `push` and `pr` jobs. You only want to run the publish on the `push` commit. See https://github.com/Project-OSRM/node-osrm/blob/8eb837abe2e2e30e595093d16e5354bc5c573575/scripts/is_pr_merge.sh which is called from https://github.com/Project-OSRM/node-osrm/blob/8eb837abe2e2e30e595093d16e5354bc5c573575/scripts/publish.sh for an example of how to do this.
+
+Remember this publishing is not the same as `npm publish`. We're just talking about the binary module here and not your entire npm package. To automate the publishing of your entire package to npm on Travis see http://about.travis-ci.org/docs/user/deployment/npm/.
+
+# Versioning
+
+The `binary` properties of `module_path`, `remote_path`, and `package_name` support variable substitution. The strings are evaluated by `node-pre-gyp` depending on your system and any custom build flags you passed. A sketch of how such a template might expand follows the list below.
+
+ - `node_abi`: The node C++ `ABI` number. This value is available in Javascript as `process.versions.modules` as of [`>= v0.10.4 >= v0.11.7`](https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e) and in C++ as the `NODE_MODULE_VERSION` define much earlier. For versions of Node before this was available we fall back to the V8 major and minor version.
+ - `platform` matches node's `process.platform` like `linux`, `darwin`, and `win32` unless the user passes the `--target_platform` option to override.
+ - `arch` matches node's `process.arch` like `x64` or `ia32` unless the user passes the `--target_arch` option to override.
+ - `libc` matches `require('detect-libc').family` like `glibc` or `musl` unless the user passes the `--target_libc` option to override.
+ - `configuration` - Either 'Release' or 'Debug' depending on whether `--debug` is passed during the build.
+ - `module_name` - the `binary.module_name` attribute from `package.json`.
+ - `version` - the semver `version` value for your module from `package.json` (NOTE: ignores the `semver.build` property).
+ - `major`, `minor`, `patch`, and `prerelease` match the individual semver values for your module's `version`.
+ - `build` - the semver `build` value. For example it would be `this.that` if your package.json `version` was `v1.0.0+this.that`.
+ - `prerelease` - the semver `prerelease` value. For example it would be `alpha.beta` if your package.json `version` was `v1.0.0-alpha.beta`.
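+
+For illustration only, here is a minimal sketch of how these variables might be substituted into a `package_name` template. The template string and every value below are assumed examples, not node-pre-gyp's actual implementation or output; the real evaluation lives in `lib/util/versioning.js`:
+
+```js
+// Hypothetical values standing in for what node-pre-gyp would compute for a
+// given system and package.json; adjust to taste. Illustration only.
+var vars = {
+  module_name: 'my_module',        // binary.module_name from package.json (assumed)
+  version: '1.0.0-alpha.beta',     // package.json version (semver.build ignored)
+  major: 1, minor: 0, patch: 0,
+  prerelease: 'alpha.beta',
+  node_abi: 'node-v48',            // assumed ABI label for this example
+  platform: process.platform,      // e.g. 'linux'
+  arch: process.arch,              // e.g. 'x64'
+  libc: 'glibc',
+  configuration: 'Release'
+};
+
+// An example template in the same style as a binary.package_name value.
+var template = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
+var expanded = template.replace(/\{([^}]+)\}/g, function (match, key) {
+  return String(vars[key]);
+});
+console.log(expanded); // e.g. my_module-v1.0.0-alpha.beta-node-v48-linux-x64.tar.gz
+```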
+
+The options are evaluated in the code at `lib/util/versioning.js`.
+
+# Download binary files from a mirror
+
+S3 is not reliably reachable from China, for well-known reasons.
+
+Passing the npm config argument `--{module_name}_binary_host_mirror` lets the binary files be downloaded through a mirror.
+
+e.g.: Install [v8-profiler](https://www.npmjs.com/package/v8-profiler) from `npm`.
+
+```bash
+$ npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/
+```
diff --git a/node_modules/node-pre-gyp/appveyor.yml b/node_modules/node-pre-gyp/appveyor.yml
new file mode 100644
index 00000000..2e74dbca
--- /dev/null
+++ b/node_modules/node-pre-gyp/appveyor.yml
@@ -0,0 +1,30 @@
+os: Visual Studio 2015
+
+environment:
+  matrix:
+    - nodejs_version: 6
+    - nodejs_version: 10
+
+platform:
+  - x64
+  - x86
+
+shallow_clone: true
+
+install:
+  - ps: Install-Product node $env:nodejs_version $env:Platform
+  - ps: Set-ExecutionPolicy Unrestricted -Scope CurrentUser -Force
+  - npm config get
+  - node --version
+  - npm --version
+  - node -e "console.log(process.arch);"
+  - IF /I "%PLATFORM%" == "x64" set PATH=C:\Python27-x64;%PATH%
+  - IF /I "%PLATFORM%" == "x86" SET PATH=C:\python27;%PATH%
+  - IF /I "%PLATFORM%" == "x64" CALL "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" amd64
+  - IF /I "%PLATFORM%" == "x86" CALL "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86
+  - npm install
+  - npm test
+
+build: off
+test: off
+deploy: off
diff --git a/node_modules/node-pre-gyp/bin/node-pre-gyp b/node_modules/node-pre-gyp/bin/node-pre-gyp
new file mode 100755
index 00000000..737b002f
--- /dev/null
+++ b/node_modules/node-pre-gyp/bin/node-pre-gyp
@@ -0,0 +1,134 @@
+#!/usr/bin/env node
+
+"use strict";
+
+/**
+ * Set the title.
+ */
+
+process.title = 'node-pre-gyp';
+
+/**
+ * Module dependencies.
+ */
+
+var node_pre_gyp = require('../');
+var log = require('npmlog');
+
+/**
+ * Process and execute the selected commands.
+ */ + +var prog = new node_pre_gyp.Run(); +var completed = false; +prog.parseArgv(process.argv); + +if (prog.todo.length === 0) { + if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) { + console.log('v%s', prog.version); + return process.exit(0); + } else if (~process.argv.indexOf('-h') || ~process.argv.indexOf('--help')) { + console.log('%s', prog.usage()); + return process.exit(0); + } + console.log('%s', prog.usage()); + return process.exit(1); +} + +// if --no-color is passed +if (prog.opts && prog.opts.hasOwnProperty('color') && !prog.opts.color) { + log.disableColor(); +} + +log.info('it worked if it ends with', 'ok'); +log.verbose('cli', process.argv); +log.info('using', process.title + '@%s', prog.version); +log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch); + + +/** + * Change dir if -C/--directory was passed. + */ + +var dir = prog.opts.directory; +if (dir) { + var fs = require('fs'); + try { + var stat = fs.statSync(dir); + if (stat.isDirectory()) { + log.info('chdir', dir); + process.chdir(dir); + } else { + log.warn('chdir', dir + ' is not a directory'); + } + } catch (e) { + if (e.code === 'ENOENT') { + log.warn('chdir', dir + ' is not a directory'); + } else { + log.warn('chdir', 'error during chdir() "%s"', e.message); + } + } +} + +function run () { + var command = prog.todo.shift(); + if (!command) { + // done! + completed = true; + log.info('ok'); + return; + } + + prog.commands[command.name](command.args, function (err) { + if (err) { + log.error(command.name + ' error'); + log.error('stack', err.stack); + errorMessage(); + log.error('not ok'); + console.log(err.message); + return process.exit(1); + } + var args_array = [].slice.call(arguments, 1); + if (args_array.length) { + console.log.apply(console, args_array); + } + // now run the next command in the queue + process.nextTick(run); + }); +} + +process.on('exit', function (code) { + if (!completed && !code) { + log.error('Completion callback never invoked!'); + issueMessage(); + process.exit(6); + } +}); + +process.on('uncaughtException', function (err) { + log.error('UNCAUGHT EXCEPTION'); + log.error('stack', err.stack); + issueMessage(); + process.exit(7); +}); + +function errorMessage () { + // copied from npm's lib/util/error-handler.js + var os = require('os'); + log.error('System', os.type() + ' ' + os.release()); + log.error('command', process.argv.map(JSON.stringify).join(' ')); + log.error('cwd', process.cwd()); + log.error('node -v', process.version); + log.error(process.title+' -v', 'v' + prog.package.version); +} + +function issueMessage () { + errorMessage(); + log.error('', [ 'This is a bug in `'+process.title+'`.', + 'Try to update '+process.title+' and file an issue if it does not help:', + ' ', + ].join('\n')); +} + +// start running the given commands! 
+run(); diff --git a/node_modules/node-pre-gyp/bin/node-pre-gyp.cmd b/node_modules/node-pre-gyp/bin/node-pre-gyp.cmd new file mode 100644 index 00000000..46e14b54 --- /dev/null +++ b/node_modules/node-pre-gyp/bin/node-pre-gyp.cmd @@ -0,0 +1,2 @@ +@echo off +node "%~dp0\node-pre-gyp" %* diff --git a/node_modules/node-pre-gyp/contributing.md b/node_modules/node-pre-gyp/contributing.md new file mode 100644 index 00000000..4038fa6a --- /dev/null +++ b/node_modules/node-pre-gyp/contributing.md @@ -0,0 +1,10 @@ +# Contributing + + +### Releasing a new version: + +- Ensure tests are passing on travis and appveyor +- Run `node scripts/abi_crosswalk.js` and commit any changes +- Update the changelog +- Tag a new release like: `git tag -a v0.6.34 -m "tagging v0.6.34" && git push --tags` +- Run `npm publish` diff --git a/node_modules/node-pre-gyp/lib/build.js b/node_modules/node-pre-gyp/lib/build.js new file mode 100644 index 00000000..43c137e3 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/build.js @@ -0,0 +1,51 @@ +"use strict"; + +module.exports = exports = build; + +exports.usage = 'Attempts to compile the module by dispatching to node-gyp or nw-gyp'; + +var napi = require('./util/napi.js'); +var compile = require('./util/compile.js'); +var handle_gyp_opts = require('./util/handle_gyp_opts.js'); +var configure = require('./configure.js'); + +function do_build(gyp,argv,callback) { + handle_gyp_opts(gyp,argv,function(err,result) { + var final_args = ['build'].concat(result.gyp).concat(result.pre); + if (result.unparsed.length > 0) { + final_args = final_args. + concat(['--']). + concat(result.unparsed); + } + if (!err && result.opts.napi_build_version) { + napi.swap_build_dir_in(result.opts.napi_build_version); + } + compile.run_gyp(final_args,result.opts,function(err) { + if (result.opts.napi_build_version) { + napi.swap_build_dir_out(result.opts.napi_build_version); + } + return callback(err); + }); + }); +} + +function build(gyp, argv, callback) { + + // Form up commands to pass to node-gyp: + // We map `node-pre-gyp build` to `node-gyp configure build` so that we do not + // trigger a clean and therefore do not pay the penalty of a full recompile + if (argv.length && (argv.indexOf('rebuild') > -1)) { + argv.shift(); // remove `rebuild` + // here we map `node-pre-gyp rebuild` to `node-gyp rebuild` which internally means + // "clean + configure + build" and triggers a full recompile + compile.run_gyp(['clean'],{},function(err) { + if (err) return callback(err); + configure(gyp,argv,function(err) { + if (err) return callback(err); + return do_build(gyp,argv,callback); + }); + }); + } else { + return do_build(gyp,argv,callback); + } +} diff --git a/node_modules/node-pre-gyp/lib/clean.js b/node_modules/node-pre-gyp/lib/clean.js new file mode 100644 index 00000000..e2da17b2 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/clean.js @@ -0,0 +1,32 @@ +"use strict"; + +module.exports = exports = clean; + +exports.usage = 'Removes the entire folder containing the compiled .node module'; + +var fs = require('fs'); +var rm = require('rimraf'); +var exists = require('fs').exists || require('path').exists; +var versioning = require('./util/versioning.js'); +var napi = require('./util/napi.js'); +var path = require('path'); + +function clean (gyp, argv, callback) { + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var napi_build_version = napi.get_napi_build_version_from_command_args(argv); + var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + var to_delete = 
opts.module_path; + if (!to_delete) { + return callback(new Error("module_path is empty, refusing to delete")); + } else if (path.normalize(to_delete) == path.normalize(process.cwd())) { + return callback(new Error("module_path is not set, refusing to delete")); + } else { + exists(to_delete, function(found) { + if (found) { + if (!gyp.opts.silent_clean) console.log('['+package_json.name+'] Removing "%s"', to_delete); + return rm(to_delete, callback); + } + return callback(); + }); + } +} diff --git a/node_modules/node-pre-gyp/lib/configure.js b/node_modules/node-pre-gyp/lib/configure.js new file mode 100644 index 00000000..a6e34382 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/configure.js @@ -0,0 +1,52 @@ +"use strict"; + +module.exports = exports = configure; + +exports.usage = 'Attempts to configure node-gyp or nw-gyp build'; + +var napi = require('./util/napi.js'); +var compile = require('./util/compile.js'); +var handle_gyp_opts = require('./util/handle_gyp_opts.js'); + +function configure(gyp, argv, callback) { + handle_gyp_opts(gyp,argv,function(err,result) { + var final_args = result.gyp.concat(result.pre); + // pull select node-gyp configure options out of the npm environ + var known_gyp_args = ['dist-url','python','nodedir','msvs_version']; + known_gyp_args.forEach(function(key) { + var val = gyp.opts[key] || gyp.opts[key.replace('-','_')]; + if (val) { + final_args.push('--'+key+'='+val); + } + }); + // --ensure=false tell node-gyp to re-install node development headers + // but it is only respected by node-gyp install, so we have to call install + // as a separate step if the user passes it + if (gyp.opts.ensure === false) { + var install_args = final_args.concat(['install','--ensure=false']); + compile.run_gyp(install_args,result.opts,function(err) { + if (err) return callback(err); + if (result.unparsed.length > 0) { + final_args = final_args. + concat(['--']). + concat(result.unparsed); + } + compile.run_gyp(['configure'].concat(final_args),result.opts,function(err) { + return callback(err); + }); + }); + } else { + if (result.unparsed.length > 0) { + final_args = final_args. + concat(['--']). 
+ concat(result.unparsed); + } + compile.run_gyp(['configure'].concat(final_args),result.opts,function(err) { + if (!err && result.opts.napi_build_version) { + napi.swap_build_dir_out(result.opts.napi_build_version); + } + return callback(err); + }); + } + }); +} diff --git a/node_modules/node-pre-gyp/lib/info.js b/node_modules/node-pre-gyp/lib/info.js new file mode 100644 index 00000000..aff9bf83 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/info.js @@ -0,0 +1,40 @@ +"use strict"; + +module.exports = exports = unpublish; + +exports.usage = 'Lists all published binaries (requires aws-sdk)'; + +var fs = require('fs'); +var log = require('npmlog'); +var versioning = require('./util/versioning.js'); +var s3_setup = require('./util/s3_setup.js'); +var config = require('rc')("node_pre_gyp",{acl:"public-read"}); + +function unpublish(gyp, argv, callback) { + var AWS = require("aws-sdk"); + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var opts = versioning.evaluate(package_json, gyp.opts); + s3_setup.detect(opts.hosted_path,config); + AWS.config.update(config); + var s3 = new AWS.S3(); + var s3_opts = { Bucket: config.bucket, + Prefix: config.prefix + }; + s3.listObjects(s3_opts, function(err, meta){ + if (err && err.code == 'NotFound') { + return callback(new Error('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix)); + } else if(err) { + return callback(err); + } else { + log.verbose(JSON.stringify(meta,null,1)); + if (meta && meta.Contents) { + meta.Contents.forEach(function(obj) { + console.log(obj.Key); + }); + } else { + console.error('['+package_json.name+'] No objects found at https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix ); + } + return callback(); + } + }); +} diff --git a/node_modules/node-pre-gyp/lib/install.js b/node_modules/node-pre-gyp/lib/install.js new file mode 100644 index 00000000..cacce67e --- /dev/null +++ b/node_modules/node-pre-gyp/lib/install.js @@ -0,0 +1,255 @@ +"use strict"; + +module.exports = exports = install; + +exports.usage = 'Attempts to install pre-built binary for module'; + +var fs = require('fs'); +var path = require('path'); +var log = require('npmlog'); +var existsAsync = fs.exists || path.exists; +var versioning = require('./util/versioning.js'); +var napi = require('./util/napi.js'); +var mkdirp = require('mkdirp'); + +var npgVersion = 'unknown'; +try { + // Read own package.json to get the current node-pre-pyp version. + var ownPackageJSON = fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8'); + npgVersion = JSON.parse(ownPackageJSON).version; +} catch (e) {} + +var http_get = { + impl: undefined, + type: undefined +}; + +try { + http_get.impl = require('request'); + http_get.type = 'request'; + log.warn("Using request for node-pre-gyp https download"); +} catch (e) { + http_get.impl = require('needle'); + http_get.type = 'needle'; + log.warn("Using needle for node-pre-gyp https download"); +} + +function download(uri,opts,callback) { + log.http('GET', uri); + + var req = null; + + // Try getting version info from the currently running npm. 
+ var envVersionInfo = process.env.npm_config_user_agent || + 'node ' + process.version; + + var requestOpts = { + uri: uri.replace('+','%2B'), + headers: { + 'User-Agent': 'node-pre-gyp (v' + npgVersion + ', ' + envVersionInfo + ')' + }, + follow_max: 10, + }; + + if (opts.cafile) { + try { + requestOpts.ca = fs.readFileSync(opts.cafile); + } catch (e) { + return callback(e); + } + } else if (opts.ca) { + requestOpts.ca = opts.ca; + } + + var proxyUrl = opts.proxy || + process.env.http_proxy || + process.env.HTTP_PROXY || + process.env.npm_config_proxy; + if (proxyUrl) { + if (/^https?:\/\//i.test(proxyUrl)) { + log.verbose('download', 'using proxy url: "%s"', proxyUrl); + requestOpts.proxy = proxyUrl; + } else { + log.warn('download', 'ignoring invalid "proxy" config setting: "%s"', proxyUrl); + } + } + try { + req = http_get.impl.get(requestOpts.uri, requestOpts); + } catch (e) { + return callback(e); + } + if (req) { + req.on('response', function (res) { + log.http(res.statusCode, uri); + }); + } + return callback(null,req); +} + +function place_binary(from,to,opts,callback) { + download(from,opts,function(err,req) { + if (err) return callback(err); + if (!req) return callback(new Error("empty req")); + var badDownload = false; + var extractCount = 0; + var hasResponse = false; + var tar = require('tar'); + + function afterTarball(err) { + if (err) return callback(err); + if (badDownload) return callback(new Error("bad download")); + if (extractCount === 0) { + return callback(new Error('There was a fatal problem while downloading/extracting the tarball')); + } + log.info('tarball', 'done parsing tarball'); + callback(); + } + + function filter_func(entry) { + log.info('install','unpacking ' + entry.path); + extractCount++; + } + + // for request compatibility + req.on('error', function(err) { + badDownload = true; + if (!hasResponse) { + hasResponse = true; + return callback(err); + } + }); + + // for needle compatibility + req.on('err', function(err) { + badDownload = true; + if (!hasResponse) { + hasResponse = true; + return callback(err); + } + }); + + req.on('close', function () { + if (!hasResponse) { + hasResponse = true; + return callback(new Error('Connection closed while downloading tarball file')); + } + }); + + req.on('response', function(res) { + // ignore redirects, needle handles these automatically. 
+ if (http_get.type === 'needle' && res.headers.hasOwnProperty('location') && res.headers.location !== '') { + return; + } + if (hasResponse) { + return; + } + hasResponse = true; + if (res.statusCode !== 200) { + badDownload = true; + var err = new Error(res.statusCode + ' status code downloading tarball ' + from); + err.statusCode = res.statusCode; + return callback(err); + } + // start unzipping and untaring + req.pipe(tar.extract({ + cwd: to, + strip: 1, + onentry: filter_func + }).on('close', afterTarball).on('error', callback)); + }); + }); +} + +function do_build(gyp,argv,callback) { + var args = ['rebuild'].concat(argv); + gyp.todo.push( { name: 'build', args: args } ); + process.nextTick(callback); +} + +function print_fallback_error(err,opts,package_json) { + var fallback_message = ' (falling back to source compile with node-gyp)'; + var full_message = ''; + if (err.statusCode !== undefined) { + // If we got a network response it but failed to download + // it means remote binaries are not available, so let's try to help + // the user/developer with the info to debug why + full_message = "Pre-built binaries not found for " + package_json.name + "@" + package_json.version; + full_message += " and " + opts.runtime + "@" + (opts.target || process.versions.node) + " (" + opts.node_abi + " ABI, " + opts.libc + ")"; + full_message += fallback_message; + log.warn("Tried to download(" + err.statusCode + "): " + opts.hosted_tarball); + log.warn(full_message); + log.http(err.message); + } else { + // If we do not have a statusCode that means an unexpected error + // happened and prevented an http response, so we output the exact error + full_message = "Pre-built binaries not installable for " + package_json.name + "@" + package_json.version; + full_message += " and " + opts.runtime + "@" + (opts.target || process.versions.node) + " (" + opts.node_abi + " ABI, " + opts.libc + ")"; + full_message += fallback_message; + log.warn(full_message); + log.warn("Hit error " + err.message); + } +} + +function install(gyp, argv, callback) { + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var napi_build_version = napi.get_napi_build_version_from_command_args(argv); + var source_build = gyp.opts['build-from-source'] || gyp.opts.build_from_source; + var update_binary = gyp.opts['update-binary'] || gyp.opts.update_binary; + var should_do_source_build = source_build === package_json.name || (source_build === true || source_build === 'true'); + if (should_do_source_build) { + log.info('build','requesting source compile'); + return do_build(gyp,argv,callback); + } else { + var fallback_to_build = gyp.opts['fallback-to-build'] || gyp.opts.fallback_to_build; + var should_do_fallback_build = fallback_to_build === package_json.name || (fallback_to_build === true || fallback_to_build === 'true'); + // but allow override from npm + if (process.env.npm_config_argv) { + var cooked = JSON.parse(process.env.npm_config_argv).cooked; + var match = cooked.indexOf("--fallback-to-build"); + if (match > -1 && cooked.length > match && cooked[match+1] == "false") { + should_do_fallback_build = false; + log.info('install','Build fallback disabled via npm flag: --fallback-to-build=false'); + } + } + var opts; + try { + opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + } catch (err) { + return callback(err); + } + + opts.ca = gyp.opts.ca; + opts.cafile = gyp.opts.cafile; + + var from = opts.hosted_tarball; + var to = opts.module_path; + var binary_module = 
path.join(to,opts.module_name + '.node'); + existsAsync(binary_module,function(found) { + if (found && !update_binary) { + console.log('['+package_json.name+'] Success: "' + binary_module + '" already installed'); + console.log('Pass --update-binary to reinstall or --build-from-source to recompile'); + return callback(); + } else { + if (!update_binary) log.info('check','checked for "' + binary_module + '" (not found)'); + mkdirp(to,function(err) { + if (err) { + after_place(err); + } else { + place_binary(from,to,opts,after_place); + } + }); + } + function after_place(err) { + if (err && should_do_fallback_build) { + print_fallback_error(err,opts,package_json); + return do_build(gyp,argv,callback); + } else if (err) { + return callback(err); + } else { + console.log('['+package_json.name+'] Success: "' + binary_module + '" is installed via remote'); + return callback(); + } + } + }); + } +} diff --git a/node_modules/node-pre-gyp/lib/node-pre-gyp.js b/node_modules/node-pre-gyp/lib/node-pre-gyp.js new file mode 100644 index 00000000..7d09b5fa --- /dev/null +++ b/node_modules/node-pre-gyp/lib/node-pre-gyp.js @@ -0,0 +1,203 @@ +"use strict"; + +/** + * Module exports. + */ + +module.exports = exports; + +/** + * Module dependencies. + */ + +var fs = require('fs'); +var path = require('path'); +var nopt = require('nopt'); +var log = require('npmlog'); +log.disableProgress(); +var napi = require('./util/napi.js'); + +var EE = require('events').EventEmitter; +var inherits = require('util').inherits; +var commands = [ + 'clean', + 'install', + 'reinstall', + 'build', + 'rebuild', + 'package', + 'testpackage', + 'publish', + 'unpublish', + 'info', + 'testbinary', + 'reveal', + 'configure' + ]; +var aliases = {}; + +// differentiate node-pre-gyp's logs from npm's +log.heading = 'node-pre-gyp'; + +exports.find = require('./pre-binding').find; + +function Run() { + var self = this; + + this.commands = {}; + + commands.forEach(function (command) { + self.commands[command] = function (argv, callback) { + log.verbose('command', command, argv); + return require('./' + command)(self, argv, callback); + }; + }); +} +inherits(Run, EE); +exports.Run = Run; +var proto = Run.prototype; + +/** + * Export the contents of the package.json. + */ + +proto.package = require('../package.json'); + +/** + * nopt configuration definitions + */ + +proto.configDefs = { + help: Boolean, // everywhere + arch: String, // 'configure' + debug: Boolean, // 'build' + directory: String, // bin + proxy: String, // 'install' + loglevel: String, // everywhere +}; + +/** + * nopt shorthands + */ + +proto.shorthands = { + release: '--no-debug', + C: '--directory', + debug: '--debug', + j: '--jobs', + silent: '--loglevel=silent', + silly: '--loglevel=silly', + verbose: '--loglevel=verbose', +}; + +/** + * expose the command aliases for the bin file to use. + */ + +proto.aliases = aliases; + +/** + * Parses the given argv array and sets the 'opts', + * 'argv' and 'command' properties. + */ + +proto.parseArgv = function parseOpts (argv) { + this.opts = nopt(this.configDefs, this.shorthands, argv); + this.argv = this.opts.argv.remain.slice(); + var commands = this.todo = []; + + // create a copy of the argv array with aliases mapped + argv = this.argv.map(function (arg) { + // is this an alias? 
+ if (arg in this.aliases) { + arg = this.aliases[arg]; + } + return arg; + }, this); + + // process the mapped args into "command" objects ("name" and "args" props) + argv.slice().forEach(function (arg) { + if (arg in this.commands) { + var args = argv.splice(0, argv.indexOf(arg)); + argv.shift(); + if (commands.length > 0) { + commands[commands.length - 1].args = args; + } + commands.push({ name: arg, args: [] }); + } + }, this); + if (commands.length > 0) { + commands[commands.length - 1].args = argv.splice(0); + } + + // expand commands entries for multiple napi builds + var dir = this.opts.directory; + if (dir == null) dir = process.cwd(); + var package_json = JSON.parse(fs.readFileSync(path.join(dir,'package.json'))); + + this.todo = napi.expand_commands (package_json, this.opts, commands); + + // support for inheriting config env variables from npm + var npm_config_prefix = 'npm_config_'; + Object.keys(process.env).forEach(function (name) { + if (name.indexOf(npm_config_prefix) !== 0) return; + var val = process.env[name]; + if (name === npm_config_prefix + 'loglevel') { + log.level = val; + } else { + // add the user-defined options to the config + name = name.substring(npm_config_prefix.length); + // avoid npm argv clobber already present args + // which avoids problem of 'npm test' calling + // script that runs unique npm install commands + if (name === 'argv') { + if (this.opts.argv && + this.opts.argv.remain && + this.opts.argv.remain.length) { + // do nothing + } else { + this.opts[name] = val; + } + } else { + this.opts[name] = val; + } + } + }, this); + + if (this.opts.loglevel) { + log.level = this.opts.loglevel; + } + log.resume(); +}; + +/** + * Returns the usage instructions for node-pre-gyp. + */ + +proto.usage = function usage () { + var str = [ + '', + ' Usage: node-pre-gyp [options]', + '', + ' where is one of:', + commands.map(function (c) { + return ' - ' + c + ' - ' + require('./' + c).usage; + }).join('\n'), + '', + 'node-pre-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'), + 'node@' + process.versions.node + ].join('\n'); + return str; +}; + +/** + * Version number getter. 
+ */ + +Object.defineProperty(proto, 'version', { + get: function () { + return this.package.version; + }, + enumerable: true +}); + diff --git a/node_modules/node-pre-gyp/lib/package.js b/node_modules/node-pre-gyp/lib/package.js new file mode 100644 index 00000000..4959265f --- /dev/null +++ b/node_modules/node-pre-gyp/lib/package.js @@ -0,0 +1,56 @@ +"use strict"; + +module.exports = exports = _package; + +exports.usage = 'Packs binary (and enclosing directory) into locally staged tarball'; + +var fs = require('fs'); +var path = require('path'); +var log = require('npmlog'); +var versioning = require('./util/versioning.js'); +var napi = require('./util/napi.js'); +var write = require('fs').createWriteStream; +var existsAsync = fs.exists || path.exists; +var mkdirp = require('mkdirp'); +var tar = require('tar'); + +function _package(gyp, argv, callback) { + var packlist = require('npm-packlist'); + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var napi_build_version = napi.get_napi_build_version_from_command_args(argv); + var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + var from = opts.module_path; + var binary_module = path.join(from,opts.module_name + '.node'); + existsAsync(binary_module,function(found) { + if (!found) { + return callback(new Error("Cannot package because " + binary_module + " missing: run `node-pre-gyp rebuild` first")); + } + var tarball = opts.staged_tarball; + var filter_func = function(entry) { + // ensure directories are +x + // https://github.com/mapnik/node-mapnik/issues/262 + log.info('package','packing ' + entry.path); + return true; + }; + mkdirp(path.dirname(tarball),function(err) { + if (err) return callback(err); + packlist({ path: from }).then(function(files) { + var base = path.basename(from); + files = files.map(function(file) { + return path.join(base, file); + }); + tar.create({ + portable: true, + gzip: true, + onentry: filter_func, + file: tarball, + cwd: path.dirname(from) + }, files, function(err) { + if (err) console.error('['+package_json.name+'] ' + err.message); + else log.info('package','Binary staged at "' + tarball + '"'); + return callback(err); + }); + }, callback); + }); + }); +} diff --git a/node_modules/node-pre-gyp/lib/pre-binding.js b/node_modules/node-pre-gyp/lib/pre-binding.js new file mode 100644 index 00000000..09e076d7 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/pre-binding.js @@ -0,0 +1,30 @@ +"use strict"; + +var versioning = require('../lib/util/versioning.js'); +var napi = require('../lib/util/napi.js'); +var existsSync = require('fs').existsSync || require('path').existsSync; +var path = require('path'); + +module.exports = exports; + +exports.usage = 'Finds the require path for the node-pre-gyp installed module'; + +exports.validate = function(package_json,opts) { + versioning.validate_config(package_json,opts); +}; + +exports.find = function(package_json_path,opts) { + if (!existsSync(package_json_path)) { + throw new Error("package.json does not exist at " + package_json_path); + } + var package_json = require(package_json_path); + versioning.validate_config(package_json,opts); + var napi_build_version; + if (napi.get_napi_build_versions (package_json, opts)) { + napi_build_version = napi.get_best_napi_build_version(package_json, opts); + } + opts = opts || {}; + if (!opts.module_root) opts.module_root = path.dirname(package_json_path); + var meta = versioning.evaluate(package_json,opts,napi_build_version); + return meta.module; +}; diff --git 
a/node_modules/node-pre-gyp/lib/publish.js b/node_modules/node-pre-gyp/lib/publish.js new file mode 100644 index 00000000..376e3984 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/publish.js @@ -0,0 +1,79 @@ +"use strict"; + +module.exports = exports = publish; + +exports.usage = 'Publishes pre-built binary (requires aws-sdk)'; + +var fs = require('fs'); +var path = require('path'); +var log = require('npmlog'); +var versioning = require('./util/versioning.js'); +var napi = require('./util/napi.js'); +var s3_setup = require('./util/s3_setup.js'); +var existsAsync = fs.exists || path.exists; +var url = require('url'); +var config = require('rc')("node_pre_gyp",{acl:"public-read"}); + +function publish(gyp, argv, callback) { + var AWS = require("aws-sdk"); + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var napi_build_version = napi.get_napi_build_version_from_command_args(argv); + var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + var tarball = opts.staged_tarball; + existsAsync(tarball,function(found) { + if (!found) { + return callback(new Error("Cannot publish because " + tarball + " missing: run `node-pre-gyp package` first")); + } + log.info('publish', 'Detecting s3 credentials'); + s3_setup.detect(opts.hosted_path,config); + var key_name = url.resolve(config.prefix,opts.package_name); + log.info('publish', 'Authenticating with s3'); + AWS.config.update(config); + var s3 = new AWS.S3(); + var s3_opts = { Bucket: config.bucket, + Key: key_name + }; + var remote_package = 'https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key; + log.info('publish', 'Checking for existing binary at ' + remote_package); + s3.headObject(s3_opts, function(err, meta){ + if (meta) log.info('publish', JSON.stringify(meta)); + if (err && err.code == 'NotFound') { + // we are safe to publish because + // the object does not already exist + log.info('publish', 'Preparing to put object'); + var s3_put = new AWS.S3(); + var s3_put_opts = { ACL: config.acl, + Body: fs.createReadStream(tarball), + Bucket: config.bucket, + Key: key_name + }; + log.info('publish', 'Putting object'); + try { + s3_put.putObject(s3_put_opts, function(err, resp){ + log.info('publish', 'returned from putting object'); + if(err) { + log.info('publish', 's3 putObject error: "' + err + '"'); + return callback(err); + } + if (resp) log.info('publish', 's3 putObject response: "' + JSON.stringify(resp) + '"'); + log.info('publish', 'successfully put object'); + console.log('['+package_json.name+'] published to ' + remote_package); + return callback(); + }); + } catch (err) { + log.info('publish', 's3 putObject error: "' + err + '"'); + return callback(err); + } + } else if (err) { + log.info('publish', 's3 headObject error: "' + err + '"'); + return callback(err); + } else { + log.error('publish','Cannot publish over existing version'); + log.error('publish',"Update the 'version' field in package.json and try again"); + log.error('publish','If the previous version was published in error see:'); + log.error('publish','\t node-pre-gyp unpublish'); + return callback(new Error('Failed publishing to ' + remote_package)); + } + }); + }); +} diff --git a/node_modules/node-pre-gyp/lib/rebuild.js b/node_modules/node-pre-gyp/lib/rebuild.js new file mode 100644 index 00000000..615a5245 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/rebuild.js @@ -0,0 +1,21 @@ +"use strict"; + +module.exports = exports = rebuild; + +exports.usage = 'Runs "clean" and "build" at once'; + +var fs = require('fs'); 
+var napi = require('./util/napi.js'); + +function rebuild (gyp, argv, callback) { + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var commands = [ + { name: 'clean', args: [] }, + { name: 'build', args: ['rebuild'] } + ]; + commands = napi.expand_commands(package_json, gyp.opts, commands); + for (var i = commands.length; i !== 0; i--) { + gyp.todo.unshift(commands[i-1]); + } + process.nextTick(callback); +} diff --git a/node_modules/node-pre-gyp/lib/reinstall.js b/node_modules/node-pre-gyp/lib/reinstall.js new file mode 100644 index 00000000..10e85fcf --- /dev/null +++ b/node_modules/node-pre-gyp/lib/reinstall.js @@ -0,0 +1,20 @@ +"use strict"; + +module.exports = exports = rebuild; + +exports.usage = 'Runs "clean" and "install" at once'; + +var fs = require('fs'); +var napi = require('./util/napi.js'); + +function rebuild (gyp, argv, callback) { + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var installArgs = []; + var napi_build_version = napi.get_best_napi_build_version(package_json, gyp.opts); + if (napi_build_version != null) installArgs = [ napi.get_command_arg (napi_build_version) ]; + gyp.todo.unshift( + { name: 'clean', args: [] }, + { name: 'install', args: installArgs } + ); + process.nextTick(callback); +} diff --git a/node_modules/node-pre-gyp/lib/reveal.js b/node_modules/node-pre-gyp/lib/reveal.js new file mode 100644 index 00000000..13d2f725 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/reveal.js @@ -0,0 +1,33 @@ +"use strict"; + +module.exports = exports = reveal; + +exports.usage = 'Reveals data on the versioned binary'; + +var fs = require('fs'); +var versioning = require('./util/versioning.js'); +var napi = require('./util/napi.js'); + +function unix_paths(key, val) { + return val && val.replace ? 
val.replace(/\\/g, '/') : val; +} + +function reveal(gyp, argv, callback) { + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var napi_build_version = napi.get_napi_build_version_from_command_args(argv); + var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + var hit = false; + // if a second arg is passed look to see + // if it is a known option + //console.log(JSON.stringify(gyp.opts,null,1)) + var remain = gyp.opts.argv.remain[gyp.opts.argv.remain.length-1]; + if (remain && opts.hasOwnProperty(remain)) { + console.log(opts[remain].replace(/\\/g, '/')); + hit = true; + } + // otherwise return all options as json + if (!hit) { + console.log(JSON.stringify(opts,unix_paths,2)); + } + return callback(); +} diff --git a/node_modules/node-pre-gyp/lib/testbinary.js b/node_modules/node-pre-gyp/lib/testbinary.js new file mode 100644 index 00000000..453987c3 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/testbinary.js @@ -0,0 +1,81 @@ +"use strict"; + +module.exports = exports = testbinary; + +exports.usage = 'Tests that the binary.node can be required'; + +var fs = require('fs'); +var path = require('path'); +var log = require('npmlog'); +var cp = require('child_process'); +var versioning = require('./util/versioning.js'); +var napi = require('./util/napi.js'); +var path = require('path'); + +function testbinary(gyp, argv, callback) { + var args = []; + var options = {}; + var shell_cmd = process.execPath; + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var napi_build_version = napi.get_napi_build_version_from_command_args(argv); + var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + // skip validation for runtimes we don't explicitly support (like electron) + if (opts.runtime && + opts.runtime !== 'node-webkit' && + opts.runtime !== 'node') { + return callback(); + } + var nw = (opts.runtime && opts.runtime === 'node-webkit'); + // ensure on windows that / are used for require path + var binary_module = opts.module.replace(/\\/g, '/'); + if ((process.arch != opts.target_arch) || + (process.platform != opts.target_platform)) { + var msg = "skipping validation since host platform/arch ("; + msg += process.platform+'/'+process.arch+")"; + msg += " does not match target ("; + msg += opts.target_platform+'/'+opts.target_arch+")"; + log.info('validate', msg); + return callback(); + } + if (nw) { + options.timeout = 5000; + if (process.platform === 'darwin') { + shell_cmd = 'node-webkit'; + } else if (process.platform === 'win32') { + shell_cmd = 'nw.exe'; + } else { + shell_cmd = 'nw'; + } + var modulePath = path.resolve(binary_module); + var appDir = path.join(__dirname, 'util', 'nw-pre-gyp'); + args.push(appDir); + args.push(modulePath); + log.info("validate","Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'"); + cp.execFile(shell_cmd, args, options, function(err, stdout, stderr) { + // check for normal timeout for node-webkit + if (err) { + if (err.killed === true && err.signal && err.signal.indexOf('SIG') > -1) { + return callback(); + } + var stderrLog = stderr.toString(); + log.info('stderr', stderrLog); + if( /^\s*Xlib:\s*extension\s*"RANDR"\s*missing\s*on\s*display\s*":\d+\.\d+"\.\s*$/.test(stderrLog) ){ + log.info('RANDR', 'stderr contains only RANDR error, ignored'); + return callback(); + } + return callback(err); + } + return callback(); + }); + return; + } + args.push('--eval'); + args.push("require('" + binary_module.replace(/'/g, '\'') +"')"); + log.info("validate","Running test 
command: '" + shell_cmd + ' ' + args.join(' ') + "'"); + cp.execFile(shell_cmd, args, options, function(err, stdout, stderr) { + if (err) { + return callback(err, { stdout:stdout, stderr:stderr}); + } + return callback(); + }); +} diff --git a/node_modules/node-pre-gyp/lib/testpackage.js b/node_modules/node-pre-gyp/lib/testpackage.js new file mode 100644 index 00000000..9091bc32 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/testpackage.js @@ -0,0 +1,55 @@ +"use strict"; + +module.exports = exports = testpackage; + +exports.usage = 'Tests that the staged package is valid'; + +var fs = require('fs'); +var path = require('path'); +var log = require('npmlog'); +var existsAsync = fs.exists || path.exists; +var versioning = require('./util/versioning.js'); +var napi = require('./util/napi.js'); +var testbinary = require('./testbinary.js'); +var tar = require('tar'); +var mkdirp = require('mkdirp'); + +function testpackage(gyp, argv, callback) { + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var napi_build_version = napi.get_napi_build_version_from_command_args(argv); + var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + var tarball = opts.staged_tarball; + existsAsync(tarball, function(found) { + if (!found) { + return callback(new Error("Cannot test package because " + tarball + " missing: run `node-pre-gyp package` first")); + } + var to = opts.module_path; + function filter_func(entry) { + log.info('install','unpacking [' + entry.path + ']'); + } + + mkdirp(to, function(err) { + if (err) { + return callback(err); + } else { + tar.extract({ + file: tarball, + cwd: to, + strip: 1, + onentry: filter_func + }).then(after_extract, callback); + } + }); + + function after_extract() { + testbinary(gyp,argv,function(err) { + if (err) { + return callback(err); + } else { + console.log('['+package_json.name+'] Package appears valid'); + return callback(); + } + }); + } + }); +} diff --git a/node_modules/node-pre-gyp/lib/unpublish.js b/node_modules/node-pre-gyp/lib/unpublish.js new file mode 100644 index 00000000..94c93dd8 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/unpublish.js @@ -0,0 +1,43 @@ +"use strict"; + +module.exports = exports = unpublish; + +exports.usage = 'Unpublishes pre-built binary (requires aws-sdk)'; + +var fs = require('fs'); +var log = require('npmlog'); +var versioning = require('./util/versioning.js'); +var napi = require('./util/napi.js'); +var s3_setup = require('./util/s3_setup.js'); +var url = require('url'); +var config = require('rc')("node_pre_gyp",{acl:"public-read"}); + +function unpublish(gyp, argv, callback) { + var AWS = require("aws-sdk"); + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var napi_build_version = napi.get_napi_build_version_from_command_args(argv); + var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + s3_setup.detect(opts.hosted_path,config); + AWS.config.update(config); + var key_name = url.resolve(config.prefix,opts.package_name); + var s3 = new AWS.S3(); + var s3_opts = { Bucket: config.bucket, + Key: key_name + }; + s3.headObject(s3_opts, function(err, meta) { + if (err && err.code == 'NotFound') { + console.log('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key); + return callback(); + } else if(err) { + return callback(err); + } else { + log.info('unpublish', JSON.stringify(meta)); + s3.deleteObject(s3_opts, function(err, resp) { + if (err) return callback(err); + log.info(JSON.stringify(resp)); 
+ console.log('['+package_json.name+'] Success: removed https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key); + return callback(); + }); + } + }); +} diff --git a/node_modules/node-pre-gyp/lib/util/abi_crosswalk.json b/node_modules/node-pre-gyp/lib/util/abi_crosswalk.json new file mode 100644 index 00000000..ddd794d5 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/util/abi_crosswalk.json @@ -0,0 +1,1830 @@ +{ + "0.1.14": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.15": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.16": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.17": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.18": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.19": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.20": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.21": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.22": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.23": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.24": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.25": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.26": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.27": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.28": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.29": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.30": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.31": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.32": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.33": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.90": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.91": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.92": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.93": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.94": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.95": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.96": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.97": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.98": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.99": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.100": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.101": { + "node_abi": null, + "v8": "2.3" + }, + "0.1.102": { + "node_abi": null, + "v8": "2.3" + }, + "0.1.103": { + "node_abi": null, + "v8": "2.3" + }, + "0.1.104": { + "node_abi": null, + "v8": "2.3" + }, + "0.2.0": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.1": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.2": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.3": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.4": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.5": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.6": { + "node_abi": 1, + "v8": "2.3" + }, + "0.3.0": { + "node_abi": 1, + "v8": "2.5" + }, + "0.3.1": { + "node_abi": 1, + "v8": "2.5" + }, + "0.3.2": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.3": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.4": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.5": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.6": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.7": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.8": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.0": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.1": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.2": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.3": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.4": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.5": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.6": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.7": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.8": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.9": { + "node_abi": 1, + "v8": 
"3.1" + }, + "0.4.10": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.11": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.12": { + "node_abi": 1, + "v8": "3.1" + }, + "0.5.0": { + "node_abi": 1, + "v8": "3.1" + }, + "0.5.1": { + "node_abi": 1, + "v8": "3.4" + }, + "0.5.2": { + "node_abi": 1, + "v8": "3.4" + }, + "0.5.3": { + "node_abi": 1, + "v8": "3.4" + }, + "0.5.4": { + "node_abi": 1, + "v8": "3.5" + }, + "0.5.5": { + "node_abi": 1, + "v8": "3.5" + }, + "0.5.6": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.7": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.8": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.9": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.10": { + "node_abi": 1, + "v8": "3.7" + }, + "0.6.0": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.1": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.2": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.3": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.4": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.5": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.6": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.7": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.8": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.9": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.10": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.11": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.12": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.13": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.14": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.15": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.16": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.17": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.18": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.19": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.20": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.21": { + "node_abi": 1, + "v8": "3.6" + }, + "0.7.0": { + "node_abi": 1, + "v8": "3.8" + }, + "0.7.1": { + "node_abi": 1, + "v8": "3.8" + }, + "0.7.2": { + "node_abi": 1, + "v8": "3.8" + }, + "0.7.3": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.4": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.5": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.6": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.7": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.8": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.9": { + "node_abi": 1, + "v8": "3.11" + }, + "0.7.10": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.11": { + "node_abi": 1, + "v8": "3.11" + }, + "0.7.12": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.0": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.1": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.2": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.3": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.4": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.5": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.6": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.7": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.8": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.9": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.10": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.11": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.12": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.13": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.14": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.15": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.16": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.17": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.18": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.19": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.20": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.21": { + "node_abi": 1, + "v8": "3.11" + }, + 
"0.8.22": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.23": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.24": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.25": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.26": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.27": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.28": { + "node_abi": 1, + "v8": "3.11" + }, + "0.9.0": { + "node_abi": 1, + "v8": "3.11" + }, + "0.9.1": { + "node_abi": 10, + "v8": "3.11" + }, + "0.9.2": { + "node_abi": 10, + "v8": "3.11" + }, + "0.9.3": { + "node_abi": 10, + "v8": "3.13" + }, + "0.9.4": { + "node_abi": 10, + "v8": "3.13" + }, + "0.9.5": { + "node_abi": 10, + "v8": "3.13" + }, + "0.9.6": { + "node_abi": 10, + "v8": "3.15" + }, + "0.9.7": { + "node_abi": 10, + "v8": "3.15" + }, + "0.9.8": { + "node_abi": 10, + "v8": "3.15" + }, + "0.9.9": { + "node_abi": 11, + "v8": "3.15" + }, + "0.9.10": { + "node_abi": 11, + "v8": "3.15" + }, + "0.9.11": { + "node_abi": 11, + "v8": "3.14" + }, + "0.9.12": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.0": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.1": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.2": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.3": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.4": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.5": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.6": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.7": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.8": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.9": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.10": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.11": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.12": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.13": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.14": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.15": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.16": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.17": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.18": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.19": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.20": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.21": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.22": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.23": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.24": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.25": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.26": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.27": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.28": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.29": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.30": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.31": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.32": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.33": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.34": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.35": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.36": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.37": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.38": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.39": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.40": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.41": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.42": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.43": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.44": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.45": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.46": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.47": { + "node_abi": 11, + "v8": "3.14" + }, 
+ "0.10.48": { + "node_abi": 11, + "v8": "3.14" + }, + "0.11.0": { + "node_abi": 12, + "v8": "3.17" + }, + "0.11.1": { + "node_abi": 12, + "v8": "3.18" + }, + "0.11.2": { + "node_abi": 12, + "v8": "3.19" + }, + "0.11.3": { + "node_abi": 12, + "v8": "3.19" + }, + "0.11.4": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.5": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.6": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.7": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.8": { + "node_abi": 13, + "v8": "3.21" + }, + "0.11.9": { + "node_abi": 13, + "v8": "3.22" + }, + "0.11.10": { + "node_abi": 13, + "v8": "3.22" + }, + "0.11.11": { + "node_abi": 14, + "v8": "3.22" + }, + "0.11.12": { + "node_abi": 14, + "v8": "3.22" + }, + "0.11.13": { + "node_abi": 14, + "v8": "3.25" + }, + "0.11.14": { + "node_abi": 14, + "v8": "3.26" + }, + "0.11.15": { + "node_abi": 14, + "v8": "3.28" + }, + "0.11.16": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.0": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.1": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.2": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.3": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.4": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.5": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.6": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.7": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.8": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.9": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.10": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.11": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.12": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.13": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.14": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.15": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.16": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.17": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.18": { + "node_abi": 14, + "v8": "3.28" + }, + "1.0.0": { + "node_abi": 42, + "v8": "3.31" + }, + "1.0.1": { + "node_abi": 42, + "v8": "3.31" + }, + "1.0.2": { + "node_abi": 42, + "v8": "3.31" + }, + "1.0.3": { + "node_abi": 42, + "v8": "4.1" + }, + "1.0.4": { + "node_abi": 42, + "v8": "4.1" + }, + "1.1.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.2.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.3.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.4.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.4.2": { + "node_abi": 43, + "v8": "4.1" + }, + "1.4.3": { + "node_abi": 43, + "v8": "4.1" + }, + "1.5.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.5.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.2": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.3": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.4": { + "node_abi": 43, + "v8": "4.1" + }, + "1.7.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.2": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.3": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.4": { + "node_abi": 43, + "v8": "4.1" + }, + "2.0.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.0.1": { + "node_abi": 44, + "v8": "4.2" + }, + "2.0.2": { + "node_abi": 44, + "v8": "4.2" + }, + "2.1.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.2.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.2.1": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.1": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.2": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.3": 
{ + "node_abi": 44, + "v8": "4.2" + }, + "2.3.4": { + "node_abi": 44, + "v8": "4.2" + }, + "2.4.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.5.0": { + "node_abi": 44, + "v8": "4.2" + }, + "3.0.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.1.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.2.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.3.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.3.1": { + "node_abi": 45, + "v8": "4.4" + }, + "4.0.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.1.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.1.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.1.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.3": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.4": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.5": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.6": { + "node_abi": 46, + "v8": "4.5" + }, + "4.3.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.3.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.3.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.3": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.4": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.5": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.6": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.7": { + "node_abi": 46, + "v8": "4.5" + }, + "4.5.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.6.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.6.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.6.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.7.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.7.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.7.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.7.3": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.3": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.4": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.5": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.6": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.7": { + "node_abi": 46, + "v8": "4.5" + }, + "4.9.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.9.1": { + "node_abi": 46, + "v8": "4.5" + }, + "5.0.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.1.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.1.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.2.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.3.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.4.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.4.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.5.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.6.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.7.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.7.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.8.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.9.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.9.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.10.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.10.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.11.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.11.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.12.0": { + "node_abi": 47, + "v8": "4.6" + }, + "6.0.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.1.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.2.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.2.1": 
{ + "node_abi": 48, + "v8": "5.0" + }, + "6.2.2": { + "node_abi": 48, + "v8": "5.0" + }, + "6.3.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.3.1": { + "node_abi": 48, + "v8": "5.0" + }, + "6.4.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.5.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.6.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.7.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.8.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.8.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.2": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.3": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.4": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.5": { + "node_abi": 48, + "v8": "5.1" + }, + "6.10.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.10.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.10.2": { + "node_abi": 48, + "v8": "5.1" + }, + "6.10.3": { + "node_abi": 48, + "v8": "5.1" + }, + "6.11.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.11.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.11.2": { + "node_abi": 48, + "v8": "5.1" + }, + "6.11.3": { + "node_abi": 48, + "v8": "5.1" + }, + "6.11.4": { + "node_abi": 48, + "v8": "5.1" + }, + "6.11.5": { + "node_abi": 48, + "v8": "5.1" + }, + "6.12.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.12.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.12.2": { + "node_abi": 48, + "v8": "5.1" + }, + "6.12.3": { + "node_abi": 48, + "v8": "5.1" + }, + "6.13.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.13.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.14.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.14.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.14.2": { + "node_abi": 48, + "v8": "5.1" + }, + "6.14.3": { + "node_abi": 48, + "v8": "5.1" + }, + "6.14.4": { + "node_abi": 48, + "v8": "5.1" + }, + "7.0.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.1.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.2.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.2.1": { + "node_abi": 51, + "v8": "5.4" + }, + "7.3.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.4.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.5.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.6.0": { + "node_abi": 51, + "v8": "5.5" + }, + "7.7.0": { + "node_abi": 51, + "v8": "5.5" + }, + "7.7.1": { + "node_abi": 51, + "v8": "5.5" + }, + "7.7.2": { + "node_abi": 51, + "v8": "5.5" + }, + "7.7.3": { + "node_abi": 51, + "v8": "5.5" + }, + "7.7.4": { + "node_abi": 51, + "v8": "5.5" + }, + "7.8.0": { + "node_abi": 51, + "v8": "5.5" + }, + "7.9.0": { + "node_abi": 51, + "v8": "5.5" + }, + "7.10.0": { + "node_abi": 51, + "v8": "5.5" + }, + "7.10.1": { + "node_abi": 51, + "v8": "5.5" + }, + "8.0.0": { + "node_abi": 57, + "v8": "5.8" + }, + "8.1.0": { + "node_abi": 57, + "v8": "5.8" + }, + "8.1.1": { + "node_abi": 57, + "v8": "5.8" + }, + "8.1.2": { + "node_abi": 57, + "v8": "5.8" + }, + "8.1.3": { + "node_abi": 57, + "v8": "5.8" + }, + "8.1.4": { + "node_abi": 57, + "v8": "5.8" + }, + "8.2.0": { + "node_abi": 57, + "v8": "5.8" + }, + "8.2.1": { + "node_abi": 57, + "v8": "5.8" + }, + "8.3.0": { + "node_abi": 57, + "v8": "6.0" + }, + "8.4.0": { + "node_abi": 57, + "v8": "6.0" + }, + "8.5.0": { + "node_abi": 57, + "v8": "6.0" + }, + "8.6.0": { + "node_abi": 57, + "v8": "6.0" + }, + "8.7.0": { + "node_abi": 57, + "v8": "6.1" + }, + "8.8.0": { + "node_abi": 57, + "v8": "6.1" + }, + "8.8.1": { + "node_abi": 57, + "v8": "6.1" + }, + "8.9.0": { + "node_abi": 57, + "v8": "6.1" + }, + "8.9.1": { + "node_abi": 57, + "v8": 
"6.1" + }, + "8.9.2": { + "node_abi": 57, + "v8": "6.1" + }, + "8.9.3": { + "node_abi": 57, + "v8": "6.1" + }, + "8.9.4": { + "node_abi": 57, + "v8": "6.1" + }, + "8.10.0": { + "node_abi": 57, + "v8": "6.2" + }, + "8.11.0": { + "node_abi": 57, + "v8": "6.2" + }, + "8.11.1": { + "node_abi": 57, + "v8": "6.2" + }, + "8.11.2": { + "node_abi": 57, + "v8": "6.2" + }, + "8.11.3": { + "node_abi": 57, + "v8": "6.2" + }, + "8.11.4": { + "node_abi": 57, + "v8": "6.2" + }, + "8.12.0": { + "node_abi": 57, + "v8": "6.2" + }, + "9.0.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.1.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.2.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.2.1": { + "node_abi": 59, + "v8": "6.2" + }, + "9.3.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.4.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.5.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.6.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.6.1": { + "node_abi": 59, + "v8": "6.2" + }, + "9.7.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.7.1": { + "node_abi": 59, + "v8": "6.2" + }, + "9.8.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.9.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.10.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.10.1": { + "node_abi": 59, + "v8": "6.2" + }, + "9.11.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.11.1": { + "node_abi": 59, + "v8": "6.2" + }, + "9.11.2": { + "node_abi": 59, + "v8": "6.2" + }, + "10.0.0": { + "node_abi": 64, + "v8": "6.6" + }, + "10.1.0": { + "node_abi": 64, + "v8": "6.6" + }, + "10.2.0": { + "node_abi": 64, + "v8": "6.6" + }, + "10.2.1": { + "node_abi": 64, + "v8": "6.6" + }, + "10.3.0": { + "node_abi": 64, + "v8": "6.6" + }, + "10.4.0": { + "node_abi": 64, + "v8": "6.7" + }, + "10.4.1": { + "node_abi": 64, + "v8": "6.7" + }, + "10.5.0": { + "node_abi": 64, + "v8": "6.7" + }, + "10.6.0": { + "node_abi": 64, + "v8": "6.7" + }, + "10.7.0": { + "node_abi": 64, + "v8": "6.7" + }, + "10.8.0": { + "node_abi": 64, + "v8": "6.7" + }, + "10.9.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.10.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.11.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.12.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.13.0": { + "node_abi": 64, + "v8": "6.8" + }, + "11.0.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.1.0": { + "node_abi": 67, + "v8": "7.0" + } +} \ No newline at end of file diff --git a/node_modules/node-pre-gyp/lib/util/compile.js b/node_modules/node-pre-gyp/lib/util/compile.js new file mode 100644 index 00000000..0dc460cb --- /dev/null +++ b/node_modules/node-pre-gyp/lib/util/compile.js @@ -0,0 +1,87 @@ +"use strict"; + +module.exports = exports; + +var fs = require('fs'); +var path = require('path'); +var win = process.platform == 'win32'; +var existsSync = fs.existsSync || path.existsSync; +var cp = require('child_process'); + +// try to build up the complete path to node-gyp +/* priority: + - node-gyp on ENV:npm_config_node_gyp (https://github.com/npm/npm/pull/4887) + - node-gyp on NODE_PATH + - node-gyp inside npm on NODE_PATH (ignore on iojs) + - node-gyp inside npm beside node exe +*/ +function which_node_gyp() { + var node_gyp_bin; + if (process.env.npm_config_node_gyp) { + try { + node_gyp_bin = process.env.npm_config_node_gyp; + if (existsSync(node_gyp_bin)) { + return node_gyp_bin; + } + } catch (err) { } + } + try { + var node_gyp_main = require.resolve('node-gyp'); + node_gyp_bin = path.join(path.dirname( + path.dirname(node_gyp_main)), + 'bin/node-gyp.js'); + if (existsSync(node_gyp_bin)) { + return node_gyp_bin; + 
} + } catch (err) { } + if (process.execPath.indexOf('iojs') === -1) { + try { + var npm_main = require.resolve('npm'); + node_gyp_bin = path.join(path.dirname( + path.dirname(npm_main)), + 'node_modules/node-gyp/bin/node-gyp.js'); + if (existsSync(node_gyp_bin)) { + return node_gyp_bin; + } + } catch (err) { } + } + var npm_base = path.join(path.dirname( + path.dirname(process.execPath)), + 'lib/node_modules/npm/'); + node_gyp_bin = path.join(npm_base, 'node_modules/node-gyp/bin/node-gyp.js'); + if (existsSync(node_gyp_bin)) { + return node_gyp_bin; + } +} + +module.exports.run_gyp = function(args,opts,callback) { + var shell_cmd = ''; + var cmd_args = []; + if (opts.runtime && opts.runtime == 'node-webkit') { + shell_cmd = 'nw-gyp'; + if (win) shell_cmd += '.cmd'; + } else { + var node_gyp_path = which_node_gyp(); + if (node_gyp_path) { + shell_cmd = process.execPath; + cmd_args.push(node_gyp_path); + } else { + shell_cmd = 'node-gyp'; + if (win) shell_cmd += '.cmd'; + } + } + var final_args = cmd_args.concat(args); + var cmd = cp.spawn(shell_cmd, final_args, {cwd: undefined, env: process.env, stdio: [ 0, 1, 2]}); + cmd.on('error', function (err) { + if (err) { + return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + err + ")")); + } + callback(null,opts); + }); + cmd.on('close', function (code) { + if (code && code !== 0) { + return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + code + ")")); + } + callback(null,opts); + }); +}; diff --git a/node_modules/node-pre-gyp/lib/util/handle_gyp_opts.js b/node_modules/node-pre-gyp/lib/util/handle_gyp_opts.js new file mode 100644 index 00000000..9f76ea35 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/util/handle_gyp_opts.js @@ -0,0 +1,103 @@ +"use strict"; + +module.exports = exports = handle_gyp_opts; + +var fs = require('fs'); +var versioning = require('./versioning.js'); +var napi = require('./napi.js'); + +/* + +Here we gather node-pre-gyp generated options (from versioning) and pass them along to node-gyp. + +We massage the args and options slightly to account for differences in what commands mean between +node-pre-gyp and node-gyp (e.g. see the difference between "build" and "rebuild" below) + +Keep in mind: the values inside `argv` and `gyp.opts` below are different depending on whether +node-pre-gyp is called directory, or if it is called in a `run-script` phase of npm. + +We also try to preserve any command line options that might have been passed to npm or node-pre-gyp. +But this is fairly difficult without passing way to much through. For example `gyp.opts` contains all +the process.env and npm pushes a lot of variables into process.env which node-pre-gyp inherits. So we have +to be very selective about what we pass through. 
+ +For example: + +`npm install --build-from-source` will give: + +argv == [ 'rebuild' ] +gyp.opts.argv == { remain: [ 'install' ], + cooked: [ 'install', '--fallback-to-build' ], + original: [ 'install', '--fallback-to-build' ] } + +`./bin/node-pre-gyp build` will give: + +argv == [] +gyp.opts.argv == { remain: [ 'build' ], + cooked: [ 'build' ], + original: [ '-C', 'test/app1', 'build' ] } + +*/ + +// select set of node-pre-gyp versioning info +// to share with node-gyp +var share_with_node_gyp = [ + 'module', + 'module_name', + 'module_path', + 'napi_version', + 'node_abi_napi', + 'napi_build_version', + 'node_napi_label' +]; + +function handle_gyp_opts(gyp, argv, callback) { + + // Collect node-pre-gyp specific variables to pass to node-gyp + var node_pre_gyp_options = []; + // generate custom node-pre-gyp versioning info + var napi_build_version = napi.get_napi_build_version_from_command_args(argv); + var opts = versioning.evaluate(JSON.parse(fs.readFileSync('./package.json')), gyp.opts, napi_build_version); + share_with_node_gyp.forEach(function(key) { + var val = opts[key]; + if (val) { + node_pre_gyp_options.push('--' + key + '=' + val); + } else if (key === 'napi_build_version') { + node_pre_gyp_options.push('--' + key + '=0'); + } else { + if (key !== 'napi_version' && key !== 'node_abi_napi') + return callback(new Error("Option " + key + " required but not found by node-pre-gyp")); + } + }); + + // Collect options that follow the special -- which disables nopt parsing + var unparsed_options = []; + var double_hyphen_found = false; + gyp.opts.argv.original.forEach(function(opt) { + if (double_hyphen_found) { + unparsed_options.push(opt); + } + if (opt == '--') { + double_hyphen_found = true; + } + }); + + // We try respect and pass through remaining command + // line options (like --foo=bar) to node-gyp + var cooked = gyp.opts.argv.cooked; + var node_gyp_options = []; + cooked.forEach(function(value) { + if (value.length > 2 && value.slice(0,2) == '--') { + var key = value.slice(2); + var val = cooked[cooked.indexOf(value)+1]; + if (val && val.indexOf('--') === -1) { // handle '--foo=bar' or ['--foo','bar'] + node_gyp_options.push('--' + key + '=' + val); + } else { // pass through --foo + node_gyp_options.push(value); + } + } + }); + + var result = {'opts':opts,'gyp':node_gyp_options,'pre':node_pre_gyp_options,'unparsed':unparsed_options}; + return callback(null,result); +} diff --git a/node_modules/node-pre-gyp/lib/util/napi.js b/node_modules/node-pre-gyp/lib/util/napi.js new file mode 100644 index 00000000..1e45ca08 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/util/napi.js @@ -0,0 +1,204 @@ +"use strict"; + +var fs = require('fs'); +var rm = require('rimraf'); +var log = require('npmlog'); + +module.exports = exports; + +var versionArray = process.version + .substr(1) + .replace(/-.*$/, '') + .split('.') + .map(function(item) { + return +item; + }); + +var napi_multiple_commands = [ + 'build', + 'clean', + 'configure', + 'package', + 'publish', + 'reveal', + 'testbinary', + 'testpackage', + 'unpublish' +]; + +var napi_build_version_tag = 'napi_build_version='; + +module.exports.get_napi_version = function(target) { // target may be undefined + // returns the non-zero numeric napi version or undefined if napi is not supported. 
+ // correctly supporting target requires an updated cross-walk + var version = process.versions.napi; // can be undefined + if (!version) { // this code should never need to be updated + if (versionArray[0] === 9 && versionArray[1] >= 3) version = 2; // 9.3.0+ + else if (versionArray[0] === 8) version = 1; // 8.0.0+ + } + return version; +}; + +module.exports.get_napi_version_as_string = function(target) { + // returns the napi version as a string or an empty string if napi is not supported. + var version = module.exports.get_napi_version(target); + return version ? ''+version : ''; +}; + +module.exports.validate_package_json = function(package_json, opts) { // throws Error + + var binary = package_json.binary; + var module_path_ok = pathOK(binary.module_path); + var remote_path_ok = pathOK(binary.remote_path); + var package_name_ok = pathOK(binary.package_name); + var napi_build_versions = module.exports.get_napi_build_versions(package_json,opts,true); + var napi_build_versions_raw = module.exports.get_napi_build_versions_raw(package_json); + + if (napi_build_versions) { + napi_build_versions.forEach(function(napi_build_version){ + if (!(parseInt(napi_build_version,10) === napi_build_version && napi_build_version > 0)) { + throw new Error("All values specified in napi_versions must be positive integers."); + } + }); + } + + if (napi_build_versions && (!module_path_ok || (!remote_path_ok && !package_name_ok))) { + throw new Error("When napi_versions is specified; module_path and either remote_path or " + + "package_name must contain the substitution string '{napi_build_version}`."); + } + + if ((module_path_ok || remote_path_ok || package_name_ok) && !napi_build_versions_raw) { + throw new Error("When the substitution string '{napi_build_version}` is specified in " + + "module_path, remote_path, or package_name; napi_versions must also be specified."); + } + + if (napi_build_versions && !module.exports.get_best_napi_build_version(package_json, opts) && + module.exports.build_napi_only(package_json)) { + throw new Error( + 'The N-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' + + 'This module supports N-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' + + 'This Node instance cannot run this module.'); + } + + if (napi_build_versions_raw && !napi_build_versions && module.exports.build_napi_only(package_json)) { + throw new Error( + 'The N-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' + + 'This module supports N-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' + + 'This Node instance cannot run this module.'); + } + +}; + +function pathOK (path) { + return path && (path.indexOf('{napi_build_version}') !== -1 || path.indexOf('{node_napi_label}') !== -1); +} + +module.exports.expand_commands = function(package_json, opts, commands) { + var expanded_commands = []; + var napi_build_versions = module.exports.get_napi_build_versions(package_json, opts); + commands.forEach(function(command){ + if (napi_build_versions && command.name === 'install') { + var napi_build_version = module.exports.get_best_napi_build_version(package_json, opts); + var args = napi_build_version ? 
[ napi_build_version_tag+napi_build_version ] : [ ]; + expanded_commands.push ({ name: command.name, args: args }); + } else if (napi_build_versions && napi_multiple_commands.indexOf(command.name) !== -1) { + napi_build_versions.forEach(function(napi_build_version){ + var args = command.args.slice(); + args.push (napi_build_version_tag+napi_build_version); + expanded_commands.push ({ name: command.name, args: args }); + }); + } else { + expanded_commands.push (command); + } + }); + return expanded_commands; +}; + +module.exports.get_napi_build_versions = function(package_json, opts, warnings) { // opts may be undefined + var napi_build_versions = []; + var supported_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined); + // remove duplicates, verify each napi version can actaully be built + if (package_json.binary && package_json.binary.napi_versions) { + package_json.binary.napi_versions.forEach(function(napi_version) { + var duplicated = napi_build_versions.indexOf(napi_version) !== -1; + if (!duplicated && supported_napi_version && napi_version <= supported_napi_version) { + napi_build_versions.push(napi_version); + } else if (warnings && !duplicated && supported_napi_version) { + log.info('This Node instance does not support builds for N-API version', napi_version); + } + }); + } + if (opts && opts["build-latest-napi-version-only"]) { + var latest_version = 0; + napi_build_versions.forEach(function(napi_version) { + if (napi_version > latest_version) latest_version = napi_version; + }); + napi_build_versions = latest_version ? [ latest_version ] : []; + } + return napi_build_versions.length ? napi_build_versions : undefined; +}; + +module.exports.get_napi_build_versions_raw = function(package_json) { + var napi_build_versions = []; + // remove duplicates + if (package_json.binary && package_json.binary.napi_versions) { + package_json.binary.napi_versions.forEach(function(napi_version) { + if (napi_build_versions.indexOf(napi_version) === -1) { + napi_build_versions.push(napi_version); + } + }); + } + return napi_build_versions.length ? napi_build_versions : undefined; +}; + +module.exports.get_command_arg = function(napi_build_version) { + return napi_build_version_tag + napi_build_version; +}; + +module.exports.get_napi_build_version_from_command_args = function(command_args) { + for (var i = 0; i < command_args.length; i++) { + var arg = command_args[i]; + if (arg.indexOf(napi_build_version_tag) === 0) { + return parseInt(arg.substr(napi_build_version_tag.length),10); + } + } + return undefined; +}; + +module.exports.swap_build_dir_out = function(napi_build_version) { + if (napi_build_version) { + rm.sync(module.exports.get_build_dir(napi_build_version)); + fs.renameSync('build', module.exports.get_build_dir(napi_build_version)); + } +}; + +module.exports.swap_build_dir_in = function(napi_build_version) { + if (napi_build_version) { + rm.sync('build'); + fs.renameSync(module.exports.get_build_dir(napi_build_version), 'build'); + } +}; + +module.exports.get_build_dir = function(napi_build_version) { + return 'build-tmp-napi-v'+napi_build_version; +}; + +module.exports.get_best_napi_build_version = function(package_json, opts) { + var best_napi_build_version = 0; + var napi_build_versions = module.exports.get_napi_build_versions (package_json, opts); + if (napi_build_versions) { + var our_napi_version = module.exports.get_napi_version(opts ? 
opts.target : undefined); + napi_build_versions.forEach(function(napi_build_version){ + if (napi_build_version > best_napi_build_version && + napi_build_version <= our_napi_version) { + best_napi_build_version = napi_build_version; + } + }); + } + return best_napi_build_version === 0 ? undefined : best_napi_build_version; +}; + +module.exports.build_napi_only = function(package_json) { + return package_json.binary && package_json.binary.package_name && + package_json.binary.package_name.indexOf('{node_napi_label}') === -1; +}; \ No newline at end of file diff --git a/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/index.html b/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/index.html new file mode 100644 index 00000000..244466c4 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/index.html @@ -0,0 +1,26 @@ + + + + +Node-webkit-based module test + + + +

+ + diff --git a/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json b/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json new file mode 100644 index 00000000..71d03f82 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json @@ -0,0 +1,9 @@ +{ +"main": "index.html", +"name": "nw-pre-gyp-module-test", +"description": "Node-webkit-based module test.", +"version": "0.0.1", +"window": { + "show": false +} +} diff --git a/node_modules/node-pre-gyp/lib/util/s3_setup.js b/node_modules/node-pre-gyp/lib/util/s3_setup.js new file mode 100644 index 00000000..5bc42e96 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/util/s3_setup.js @@ -0,0 +1,27 @@ +"use strict"; + +module.exports = exports; + +var url = require('url'); + +var URI_REGEX="^(.*)\.(s3(?:-.*)?)\.amazonaws\.com$"; + +module.exports.detect = function(to,config) { + var uri = url.parse(to); + var hostname_matches = uri.hostname.match(URI_REGEX); + config.prefix = (!uri.pathname || uri.pathname == '/') ? '' : uri.pathname.replace('/',''); + if(!hostname_matches) { + return; + } + if (!config.bucket) { + config.bucket = hostname_matches[1]; + } + if (!config.region) { + var s3_domain = hostname_matches[2]; + if (s3_domain.slice(0,3) == 's3-' && + s3_domain.length >= 3) { + // it appears the region is explicit in the url + config.region = s3_domain.replace('s3-',''); + } + } +}; diff --git a/node_modules/node-pre-gyp/lib/util/versioning.js b/node_modules/node-pre-gyp/lib/util/versioning.js new file mode 100644 index 00000000..fafb0da0 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/util/versioning.js @@ -0,0 +1,331 @@ +"use strict"; + +module.exports = exports; + +var path = require('path'); +var semver = require('semver'); +var url = require('url'); +var detect_libc = require('detect-libc'); +var napi = require('./napi.js'); + +var abi_crosswalk; + +// This is used for unit testing to provide a fake +// ABI crosswalk that emulates one that is not updated +// for the current version +if (process.env.NODE_PRE_GYP_ABI_CROSSWALK) { + abi_crosswalk = require(process.env.NODE_PRE_GYP_ABI_CROSSWALK); +} else { + abi_crosswalk = require('./abi_crosswalk.json'); +} + +var major_versions = {}; +Object.keys(abi_crosswalk).forEach(function(v) { + var major = v.split('.')[0]; + if (!major_versions[major]) { + major_versions[major] = v; + } +}); + +function get_electron_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_electron_abi requires valid runtime arg"); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error("Empty target version is not supported if electron is the target."); + } + // Electron guarantees that patch version update won't break native modules. + var sem_ver = semver.parse(target_version); + return runtime + '-v' + sem_ver.major + '.' 
+ sem_ver.minor; +} +module.exports.get_electron_abi = get_electron_abi; + +function get_node_webkit_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_node_webkit_abi requires valid runtime arg"); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error("Empty target version is not supported if node-webkit is the target."); + } + return runtime + '-v' + target_version; +} +module.exports.get_node_webkit_abi = get_node_webkit_abi; + +function get_node_abi(runtime, versions) { + if (!runtime) { + throw new Error("get_node_abi requires valid runtime arg"); + } + if (!versions) { + throw new Error("get_node_abi requires valid process.versions object"); + } + var sem_ver = semver.parse(versions.node); + if (sem_ver.major === 0 && sem_ver.minor % 2) { // odd series + // https://github.com/mapbox/node-pre-gyp/issues/124 + return runtime+'-v'+versions.node; + } else { + // process.versions.modules added in >= v0.10.4 and v0.11.7 + // https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e + return versions.modules ? runtime+'-v' + (+versions.modules) : + 'v8-' + versions.v8.split('.').slice(0,2).join('.'); + } +} +module.exports.get_node_abi = get_node_abi; + +function get_runtime_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_runtime_abi requires valid runtime arg"); + } + if (runtime === 'node-webkit') { + return get_node_webkit_abi(runtime, target_version || process.versions['node-webkit']); + } else if (runtime === 'electron') { + return get_electron_abi(runtime, target_version || process.versions.electron); + } else { + if (runtime != 'node') { + throw new Error("Unknown Runtime: '" + runtime + "'"); + } + if (!target_version) { + return get_node_abi(runtime,process.versions); + } else { + var cross_obj; + // abi_crosswalk generated with ./scripts/abi_crosswalk.js + if (abi_crosswalk[target_version]) { + cross_obj = abi_crosswalk[target_version]; + } else { + var target_parts = target_version.split('.').map(function(i) { return +i; }); + if (target_parts.length != 3) { // parse failed + throw new Error("Unknown target version: " + target_version); + } + /* + The below code tries to infer the last known ABI compatible version + that we have recorded in the abi_crosswalk.json when an exact match + is not possible. The reasons for this to exist are complicated: + + - We support passing --target to be able to allow developers to package binaries for versions of node + that are not the same one as they are running. This might also be used in combination with the + --target_arch or --target_platform flags to also package binaries for alternative platforms + - When --target is passed we can't therefore determine the ABI (process.versions.modules) from the node + version that is running in memory + - So, therefore node-pre-gyp keeps an "ABI crosswalk" (lib/util/abi_crosswalk.json) to be able to look + this info up for all versions + - But we cannot easily predict what the future ABI will be for released versions + - And node-pre-gyp needs to be a `bundledDependency` in apps that depend on it in order to work correctly + by being fully available at install time. 
+ - So, the speed of node releases and the bundled nature of node-pre-gyp mean that a new node-pre-gyp release + need to happen for every node.js/io.js/node-webkit/nw.js/atom-shell/etc release that might come online if + you want the `--target` flag to keep working for the latest version + - Which is impractical ^^ + - Hence the below code guesses about future ABI to make the need to update node-pre-gyp less demanding. + + In practice then you can have a dependency of your app like `node-sqlite3` that bundles a `node-pre-gyp` that + only knows about node v0.10.33 in the `abi_crosswalk.json` but target node v0.10.34 (which is assumed to be + ABI compatible with v0.10.33). + + TODO: use semver module instead of custom version parsing + */ + var major = target_parts[0]; + var minor = target_parts[1]; + var patch = target_parts[2]; + // io.js: yeah if node.js ever releases 1.x this will break + // but that is unlikely to happen: https://github.com/iojs/io.js/pull/253#issuecomment-69432616 + if (major === 1) { + // look for last release that is the same major version + // e.g. we assume io.js 1.x is ABI compatible with >= 1.0.0 + while (true) { + if (minor > 0) --minor; + if (patch > 0) --patch; + var new_iojs_target = '' + major + '.' + minor + '.' + patch; + if (abi_crosswalk[new_iojs_target]) { + cross_obj = abi_crosswalk[new_iojs_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_iojs_target + ' as ABI compatible target'); + break; + } + if (minor === 0 && patch === 0) { + break; + } + } + } else if (major >= 2) { + // look for last release that is the same major version + if (major_versions[major]) { + cross_obj = abi_crosswalk[major_versions[major]]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + major_versions[major] + ' as ABI compatible target'); + } + } else if (major === 0) { // node.js + if (target_parts[1] % 2 === 0) { // for stable/even node.js series + // look for the last release that is the same minor release + // e.g. we assume node 0.10.x is ABI compatible with >= 0.10.0 + while (--patch > 0) { + var new_node_target = '' + major + '.' + minor + '.' + patch; + if (abi_crosswalk[new_node_target]) { + cross_obj = abi_crosswalk[new_node_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_node_target + ' as ABI compatible target'); + break; + } + } + } + } + } + if (!cross_obj) { + throw new Error("Unsupported target version: " + target_version); + } + // emulate process.versions + var versions_obj = { + node: target_version, + v8: cross_obj.v8+'.0', + // abi_crosswalk uses 1 for node versions lacking process.versions.modules + // process.versions.modules added in >= v0.10.4 and v0.11.7 + modules: cross_obj.node_abi > 1 ? 
cross_obj.node_abi : undefined + }; + return get_node_abi(runtime, versions_obj); + } + } +} +module.exports.get_runtime_abi = get_runtime_abi; + +var required_parameters = [ + 'module_name', + 'module_path', + 'host' +]; + +function validate_config(package_json,opts) { + var msg = package_json.name + ' package.json is not node-pre-gyp ready:\n'; + var missing = []; + if (!package_json.main) { + missing.push('main'); + } + if (!package_json.version) { + missing.push('version'); + } + if (!package_json.name) { + missing.push('name'); + } + if (!package_json.binary) { + missing.push('binary'); + } + var o = package_json.binary; + required_parameters.forEach(function(p) { + if (missing.indexOf('binary') > -1) { + missing.pop('binary'); + } + if (!o || o[p] === undefined || o[p] === "") { + missing.push('binary.' + p); + } + }); + if (missing.length >= 1) { + throw new Error(msg+"package.json must declare these properties: \n" + missing.join('\n')); + } + if (o) { + // enforce https over http + var protocol = url.parse(o.host).protocol; + if (protocol === 'http:') { + throw new Error("'host' protocol ("+protocol+") is invalid - only 'https:' is accepted"); + } + } + napi.validate_package_json(package_json,opts); +} + +module.exports.validate_config = validate_config; + +function eval_template(template,opts) { + Object.keys(opts).forEach(function(key) { + var pattern = '{'+key+'}'; + while (template.indexOf(pattern) > -1) { + template = template.replace(pattern,opts[key]); + } + }); + return template; +} + +// url.resolve needs single trailing slash +// to behave correctly, otherwise a double slash +// may end up in the url which breaks requests +// and a lacking slash may not lead to proper joining +function fix_slashes(pathname) { + if (pathname.slice(-1) != '/') { + return pathname + '/'; + } + return pathname; +} + +// remove double slashes +// note: path.normalize will not work because +// it will convert forward to back slashes +function drop_double_slashes(pathname) { + return pathname.replace(/\/\//g,'/'); +} + +function get_process_runtime(versions) { + var runtime = 'node'; + if (versions['node-webkit']) { + runtime = 'node-webkit'; + } else if (versions.electron) { + runtime = 'electron'; + } + return runtime; +} + +module.exports.get_process_runtime = get_process_runtime; + +var default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz'; +var default_remote_path = ''; + +module.exports.evaluate = function(package_json,options,napi_build_version) { + options = options || {}; + validate_config(package_json,options); // options is a suitable substitute for opts in this case + var v = package_json.version; + var module_version = semver.parse(v); + var runtime = options.runtime || get_process_runtime(process.versions); + var opts = { + name: package_json.name, + configuration: Boolean(options.debug) ? 'Debug' : 'Release', + debug: options.debug, + module_name: package_json.binary.module_name, + version: module_version.version, + prerelease: module_version.prerelease.length ? module_version.prerelease.join('.') : '', + build: module_version.build.length ? module_version.build.join('.') : '', + major: module_version.major, + minor: module_version.minor, + patch: module_version.patch, + runtime: runtime, + node_abi: get_runtime_abi(runtime,options.target), + node_abi_napi: napi.get_napi_version(options.target) ? 
'napi' : get_runtime_abi(runtime,options.target), + napi_version: napi.get_napi_version(options.target), // non-zero numeric, undefined if unsupported + napi_build_version: napi_build_version || '', + node_napi_label: napi_build_version ? 'napi-v' + napi_build_version : get_runtime_abi(runtime,options.target), + target: options.target || '', + platform: options.target_platform || process.platform, + target_platform: options.target_platform || process.platform, + arch: options.target_arch || process.arch, + target_arch: options.target_arch || process.arch, + libc: options.target_libc || detect_libc.family || 'unknown', + module_main: package_json.main, + toolset : options.toolset || '' // address https://github.com/mapbox/node-pre-gyp/issues/119 + }; + // support host mirror with npm config `--{module_name}_binary_host_mirror` + // e.g.: https://github.com/node-inspector/v8-profiler/blob/master/package.json#L25 + // > npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/ + var host = process.env['npm_config_' + opts.module_name + '_binary_host_mirror'] || package_json.binary.host; + opts.host = fix_slashes(eval_template(host,opts)); + opts.module_path = eval_template(package_json.binary.module_path,opts); + // now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably + if (options.module_root) { + // resolve relative to known module root: works for pre-binding require + opts.module_path = path.join(options.module_root,opts.module_path); + } else { + // resolve relative to current working directory: works for node-pre-gyp commands + opts.module_path = path.resolve(opts.module_path); + } + opts.module = path.join(opts.module_path,opts.module_name + '.node'); + opts.remote_path = package_json.binary.remote_path ? drop_double_slashes(fix_slashes(eval_template(package_json.binary.remote_path,opts))) : default_remote_path; + var package_name = package_json.binary.package_name ? 
package_json.binary.package_name : default_package_name; + opts.package_name = eval_template(package_name,opts); + opts.staged_tarball = path.join('build/stage',opts.remote_path,opts.package_name); + opts.hosted_path = url.resolve(opts.host,opts.remote_path); + opts.hosted_tarball = url.resolve(opts.hosted_path,opts.package_name); + return opts; +}; diff --git a/node_modules/node-pre-gyp/node_modules/.bin/detect-libc b/node_modules/node-pre-gyp/node_modules/.bin/detect-libc new file mode 120000 index 00000000..ed8d8868 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/detect-libc @@ -0,0 +1 @@ +../../../detect-libc/bin/detect-libc.js \ No newline at end of file diff --git a/node_modules/node-pre-gyp/node_modules/.bin/mkdirp b/node_modules/node-pre-gyp/node_modules/.bin/mkdirp new file mode 120000 index 00000000..91a5f623 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/mkdirp @@ -0,0 +1 @@ +../../../mkdirp/bin/cmd.js \ No newline at end of file diff --git a/node_modules/node-pre-gyp/node_modules/.bin/needle b/node_modules/node-pre-gyp/node_modules/.bin/needle new file mode 120000 index 00000000..6a4b21e1 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/needle @@ -0,0 +1 @@ +../../../needle/bin/needle \ No newline at end of file diff --git a/node_modules/node-pre-gyp/node_modules/.bin/nopt b/node_modules/node-pre-gyp/node_modules/.bin/nopt new file mode 120000 index 00000000..6b6566ea --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/nopt @@ -0,0 +1 @@ +../nopt/bin/nopt.js \ No newline at end of file diff --git a/node_modules/node-pre-gyp/node_modules/.bin/rc b/node_modules/node-pre-gyp/node_modules/.bin/rc new file mode 120000 index 00000000..e63a3f2a --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/rc @@ -0,0 +1 @@ +../../../rc/cli.js \ No newline at end of file diff --git a/node_modules/node-pre-gyp/node_modules/.bin/rimraf b/node_modules/node-pre-gyp/node_modules/.bin/rimraf new file mode 120000 index 00000000..632d6da2 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/rimraf @@ -0,0 +1 @@ +../../../rimraf/bin.js \ No newline at end of file diff --git a/node_modules/node-pre-gyp/node_modules/.bin/semver b/node_modules/node-pre-gyp/node_modules/.bin/semver new file mode 120000 index 00000000..b3ca6032 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/semver @@ -0,0 +1 @@ +../../../semver/bin/semver \ No newline at end of file diff --git a/node_modules/node-pre-gyp/node_modules/nopt/.npmignore b/node_modules/node-pre-gyp/node_modules/nopt/.npmignore new file mode 100644 index 00000000..3c3629e6 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/nopt/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/node-pre-gyp/node_modules/nopt/.travis.yml b/node_modules/node-pre-gyp/node_modules/nopt/.travis.yml new file mode 100644 index 00000000..a1cef591 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/nopt/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +node_js: + - '0.12' + - '4' + - '6' + - '7' +before_install: + - npm install -g npm@latest diff --git a/node_modules/node-pre-gyp/node_modules/nopt/CHANGELOG.md b/node_modules/node-pre-gyp/node_modules/nopt/CHANGELOG.md new file mode 100644 index 00000000..82a09fb4 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/nopt/CHANGELOG.md @@ -0,0 +1,58 @@ +### v4.0.1 (2016-12-14) + +#### WHOOPS + +* [`fb9b1ce`](https://github.com/npm/nopt/commit/fb9b1ce57b3c69b4f7819015be87719204f77ef6) + Merged so many patches at 
once that the code fencing + ([@adius](https://github.com/adius)) added got broken. Sorry, + ([@adius](https://github.com/adius))! + ([@othiym23](https://github.com/othiym23)) + +### v4.0.0 (2016-12-13) + +#### BREAKING CHANGES + +* [`651d447`](https://github.com/npm/nopt/commit/651d4473946096d341a480bbe56793de3fc706aa) + When parsing String-typed arguments, if the next value is `""`, don't simply + swallow it. ([@samjonester](https://github.com/samjonester)) + +#### PERFORMANCE TWEAKS + +* [`3370ce8`](https://github.com/npm/nopt/commit/3370ce87a7618ba228883861db84ddbcdff252a9) + Simplify initialization. ([@elidoran](https://github.com/elidoran)) +* [`356e58e`](https://github.com/npm/nopt/commit/356e58e3b3b431a4b1af7fd7bdee44c2c0526a09) + Store `Array.isArray(types[arg])` for reuse. + ([@elidoran](https://github.com/elidoran)) +* [`0d95e90`](https://github.com/npm/nopt/commit/0d95e90515844f266015b56d2c80b94e5d14a07e) + Interpret single-item type arrays as a single type. + ([@samjonester](https://github.com/samjonester)) +* [`07c69d3`](https://github.com/npm/nopt/commit/07c69d38b5186450941fbb505550becb78a0e925) + Simplify key-value extraction. ([@elidoran](https://github.com/elidoran)) +* [`39b6e5c`](https://github.com/npm/nopt/commit/39b6e5c65ac47f60cd43a1fbeece5cd4c834c254) + Only call `Date.parse(val)` once. ([@elidoran](https://github.com/elidoran)) +* [`934943d`](https://github.com/npm/nopt/commit/934943dffecb55123a2b15959fe2a359319a5dbd) + Use `osenv.home()` to find a user's home directory instead of assuming it's + always `$HOME`. ([@othiym23](https://github.com/othiym23)) + +#### TEST & CI IMPROVEMENTS + +* [`326ffff`](https://github.com/npm/nopt/commit/326ffff7f78a00bcd316adecf69075f8a8093619) + Fix `/tmp` test to work on Windows. + ([@elidoran](https://github.com/elidoran)) +* [`c89d31a`](https://github.com/npm/nopt/commit/c89d31a49d14f2238bc6672db08da697bbc57f1b) + Only run Windows tests on Windows, only run Unix tests on a Unix. + ([@elidoran](https://github.com/elidoran)) +* [`affd3d1`](https://github.com/npm/nopt/commit/affd3d1d0addffa93006397b2013b18447339366) + Refresh Travis to run the tests against the currently-supported batch of npm + versions. ([@helio](https://github.com/helio)-frota) +* [`55f9449`](https://github.com/npm/nopt/commit/55f94497d163ed4d16dd55fd6c4fb95cc440e66d) + `tap@8.0.1` ([@othiym23](https://github.com/othiym23)) + +#### DOC TWEAKS + +* [`5271229`](https://github.com/npm/nopt/commit/5271229ee7c810217dd51616c086f5d9ab224581) + Use JavaScript code block for syntax highlighting. + ([@adius](https://github.com/adius)) +* [`c0d156f`](https://github.com/npm/nopt/commit/c0d156f229f9994c5dfcec4a8886eceff7a07682) + The code sample in the README had `many2: [ oneThing ]`, and now it has + `many2: [ two, things ]`. ([@silkentrance](https://github.com/silkentrance)) diff --git a/node_modules/node-pre-gyp/node_modules/nopt/LICENSE b/node_modules/node-pre-gyp/node_modules/nopt/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/nopt/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-pre-gyp/node_modules/nopt/README.md b/node_modules/node-pre-gyp/node_modules/nopt/README.md new file mode 100644 index 00000000..a99531c0 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/nopt/README.md @@ -0,0 +1,213 @@ +If you want to write an option parser, and have it be good, there are +two ways to do it. The Right Way, and the Wrong Way. + +The Wrong Way is to sit down and write an option parser. We've all done +that. + +The Right Way is to write some complex configurable program with so many +options that you hit the limit of your frustration just trying to +manage them all, and defer it with duct-tape solutions until you see +exactly to the core of the problem, and finally snap and write an +awesome option parser. + +If you want to write an option parser, don't write an option parser. +Write a package manager, or a source control system, or a service +restarter, or an operating system. You probably won't end up with a +good one of those, but if you don't give up, and you are relentless and +diligent enough in your procrastination, you may just end up with a very +nice option parser. + +## USAGE + +```javascript +// my-program.js +var nopt = require("nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + , "many1" : [String, Array] + , "many2" : [path, Array] + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag"] + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) +console.log(parsed) +``` + +This would give you support for any of the following: + +```console +$ node my-program.js --foo "blerp" --no-flag +{ "foo" : "blerp", "flag" : false } + +$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag +{ bar: 7, foo: "Mr. Hand", flag: true } + +$ node my-program.js --foo "blerp" -f -----p +{ foo: "blerp", flag: true, pick: true } + +$ node my-program.js -fp --foofoo +{ foo: "Mr. Foo", flag: true, pick: true } + +$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. +{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } + +$ node my-program.js --blatzk -fp # unknown opts are ok. +{ blatzk: true, flag: true, pick: true } + +$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value +{ blatzk: 1000, flag: true, pick: true } + +$ node my-program.js --no-blatzk -fp # unless they start with "no-" +{ blatzk: false, flag: true, pick: true } + +$ node my-program.js --baz b/a/z # known paths are resolved. +{ baz: "/Users/isaacs/b/a/z" } + +# if Array is one of the types, then it can take many +# values, and will always be an array. The other types provided +# specify what types are allowed in the list. 
+ +$ node my-program.js --many1 5 --many1 null --many1 foo +{ many1: ["5", "null", "foo"] } + +$ node my-program.js --many2 foo --many2 bar +{ many2: ["/path/to/foo", "path/to/bar"] } +``` + +Read the tests at the bottom of `lib/nopt.js` for more examples of +what this puppy can do. + +## Types + +The following types are supported, and defined on `nopt.typeDefs` + +* String: A normal string. No parsing is done. +* path: A file system path. Gets resolved against cwd if not absolute. +* url: A url. If it doesn't parse, it isn't accepted. +* Number: Must be numeric. +* Date: Must parse as a date. If it does, and `Date` is one of the options, + then it will return a Date object, not a string. +* Boolean: Must be either `true` or `false`. If an option is a boolean, + then it does not need a value, and its presence will imply `true` as + the value. To negate boolean flags, do `--no-whatever` or `--whatever + false` +* NaN: Means that the option is strictly not allowed. Any value will + fail. +* Stream: An object matching the "Stream" class in node. Valuable + for use when validating programmatically. (npm uses this to let you + supply any WriteStream on the `outfd` and `logfd` config options.) +* Array: If `Array` is specified as one of the types, then the value + will be parsed as a list of options. This means that multiple values + can be specified, and that the value will always be an array. + +If a type is an array of values not on this list, then those are +considered valid values. For instance, in the example above, the +`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, +and any other value will be rejected. + +When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be +interpreted as their JavaScript equivalents. + +You can also mix types and values, or multiple types, in a list. For +instance `{ blah: [Number, null] }` would allow a value to be set to +either a Number or null. When types are ordered, this implies a +preference, and the first type that can be used to properly interpret +the value will be used. + +To define a new type, add it to `nopt.typeDefs`. Each item in that +hash is an object with a `type` member and a `validate` method. The +`type` member is an object that matches what goes in the type list. The +`validate` method is a function that gets called with `validate(data, +key, val)`. Validate methods should assign `data[key]` to the valid +value of `val` if it can be handled properly, or return boolean +`false` if it cannot. + +You can also call `nopt.clean(data, types, typeDefs)` to clean up a +config object and remove its invalid properties. + +## Error Handling + +By default, nopt outputs a warning to standard error when invalid values for +known options are found. You can change this behavior by assigning a method +to `nopt.invalidHandler`. This method will be called with +the offending `nopt.invalidHandler(key, val, types)`. + +If no `nopt.invalidHandler` is assigned, then it will console.error +its whining. If it is assigned to boolean `false` then the warning is +suppressed. + +## Abbreviations + +Yes, they are supported. If you define options like this: + +```javascript +{ "foolhardyelephants" : Boolean +, "pileofmonkeys" : Boolean } +``` + +Then this will work: + +```bash +node program.js --foolhar --pil +node program.js --no-f --pileofmon +# etc. +``` + +## Shorthands + +Shorthands are a hash of shorter option names to a snippet of args that +they expand to. 
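As a minimal sketch (the `loglevel` option and the hard-coded argument vector here are illustrative only, not part of this package), a shorthand hash is simply passed as the second argument to `nopt()`, reusing the `s` shorthand from the npm example further below:

```javascript
// Sketch: shorthands are expanded into their long-form args before type parsing.
var nopt = require("nopt")

// "loglevel" accepts one of three explicit string values (illustrative option name)
var knownOpts  = { "loglevel" : ["silent", "info", "verbose"] }
// "-s" expands to "--loglevel silent"
var shortHands = { "s" : ["--loglevel", "silent"] }

// argv[0] and argv[1] are sliced off, so only "-s" is parsed
var parsed = nopt(knownOpts, shortHands, ["node", "prog.js", "-s"], 2)
console.log(parsed.loglevel) // "silent"
```

Parsing `-s` therefore produces the same result as spelling out `--loglevel silent` in full.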
+ +If multiple one-character shorthands are all combined, and the +combination does not unambiguously match any other option or shorthand, +then they will be broken up into their constituent parts. For example: + +```json +{ "s" : ["--loglevel", "silent"] +, "g" : "--global" +, "f" : "--force" +, "p" : "--parseable" +, "l" : "--long" +} +``` + +```bash +npm ls -sgflp +# just like doing this: +npm ls --loglevel silent --global --force --long --parseable +``` + +## The Rest of the args + +The config object returned by nopt is given a special member called +`argv`, which is an object with the following fields: + +* `remain`: The remaining args after all the parsing has occurred. +* `original`: The args as they originally appeared. +* `cooked`: The args after flags and shorthands are expanded. + +## Slicing + +Node programs are called with more or less the exact argv as it appears +in C land, after the v8 and node-specific options have been plucked off. +As such, `argv[0]` is always `node` and `argv[1]` is always the +JavaScript program being run. + +That's usually not very useful to you. So they're sliced off by +default. If you want them, then you can pass in `0` as the last +argument, or any other number that you'd like to slice off the start of +the list. diff --git a/node_modules/node-pre-gyp/node_modules/nopt/bin/nopt.js b/node_modules/node-pre-gyp/node_modules/nopt/bin/nopt.js new file mode 100755 index 00000000..3232d4c5 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/nopt/bin/nopt.js @@ -0,0 +1,54 @@ +#!/usr/bin/env node +var nopt = require("../lib/nopt") + , path = require("path") + , types = { num: Number + , bool: Boolean + , help: Boolean + , list: Array + , "num-list": [Number, Array] + , "str-list": [String, Array] + , "bool-list": [Boolean, Array] + , str: String + , clear: Boolean + , config: Boolean + , length: Number + , file: path + } + , shorthands = { s: [ "--str", "astring" ] + , b: [ "--bool" ] + , nb: [ "--no-bool" ] + , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ] + , "?": ["--help"] + , h: ["--help"] + , H: ["--help"] + , n: [ "--num", "125" ] + , c: ["--config"] + , l: ["--length"] + , f: ["--file"] + } + , parsed = nopt( types + , shorthands + , process.argv + , 2 ) + +console.log("parsed", parsed) + +if (parsed.help) { + console.log("") + console.log("nopt cli tester") + console.log("") + console.log("types") + console.log(Object.keys(types).map(function M (t) { + var type = types[t] + if (Array.isArray(type)) { + return [t, type.map(function (type) { return type.name })] + } + return [t, type && type.name] + }).reduce(function (s, i) { + s[i[0]] = i[1] + return s + }, {})) + console.log("") + console.log("shorthands") + console.log(shorthands) +} diff --git a/node_modules/node-pre-gyp/node_modules/nopt/examples/my-program.js b/node_modules/node-pre-gyp/node_modules/nopt/examples/my-program.js new file mode 100755 index 00000000..142447e1 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/nopt/examples/my-program.js @@ -0,0 +1,30 @@ +#!/usr/bin/env node + +//process.env.DEBUG_NOPT = 1 + +// my-program.js +var nopt = require("../lib/nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + } + , shortHands = { "foofoo" : ["--foo", "Mr. 
Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag", "true"] + , "g" : ["--flag"] + , "s" : "--flag" + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) + +console.log("parsed =\n"+ require("util").inspect(parsed)) diff --git a/node_modules/node-pre-gyp/node_modules/nopt/lib/nopt.js b/node_modules/node-pre-gyp/node_modules/nopt/lib/nopt.js new file mode 100644 index 00000000..1fb11351 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/nopt/lib/nopt.js @@ -0,0 +1,436 @@ +// info about each config option. + +var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG + ? function () { console.error.apply(console, arguments) } + : function () {} + +var url = require("url") + , path = require("path") + , Stream = require("stream").Stream + , abbrev = require("abbrev") + , osenv = require("osenv") + +module.exports = exports = nopt +exports.clean = clean + +exports.typeDefs = + { String : { type: String, validate: validateString } + , Boolean : { type: Boolean, validate: validateBoolean } + , url : { type: url, validate: validateUrl } + , Number : { type: Number, validate: validateNumber } + , path : { type: path, validate: validatePath } + , Stream : { type: Stream, validate: validateStream } + , Date : { type: Date, validate: validateDate } + } + +function nopt (types, shorthands, args, slice) { + args = args || process.argv + types = types || {} + shorthands = shorthands || {} + if (typeof slice !== "number") slice = 2 + + debug(types, shorthands, args, slice) + + args = args.slice(slice) + var data = {} + , key + , argv = { + remain: [], + cooked: args, + original: args.slice(0) + } + + parse(args, data, argv.remain, types, shorthands) + // now data is full + clean(data, types, exports.typeDefs) + data.argv = argv + Object.defineProperty(data.argv, 'toString', { value: function () { + return this.original.map(JSON.stringify).join(" ") + }, enumerable: false }) + return data +} + +function clean (data, types, typeDefs) { + typeDefs = typeDefs || exports.typeDefs + var remove = {} + , typeDefault = [false, true, null, String, Array] + + Object.keys(data).forEach(function (k) { + if (k === "argv") return + var val = data[k] + , isArray = Array.isArray(val) + , type = types[k] + if (!isArray) val = [val] + if (!type) type = typeDefault + if (type === Array) type = typeDefault.concat(Array) + if (!Array.isArray(type)) type = [type] + + debug("val=%j", val) + debug("types=", type) + val = val.map(function (val) { + // if it's an unknown value, then parse false/true/null/numbers/dates + if (typeof val === "string") { + debug("string %j", val) + val = val.trim() + if ((val === "null" && ~type.indexOf(null)) + || (val === "true" && + (~type.indexOf(true) || ~type.indexOf(Boolean))) + || (val === "false" && + (~type.indexOf(false) || ~type.indexOf(Boolean)))) { + val = JSON.parse(val) + debug("jsonable %j", val) + } else if (~type.indexOf(Number) && !isNaN(val)) { + debug("convert to number", val) + val = +val + } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { + debug("convert to date", val) + val = new Date(val) + } + } + + if (!types.hasOwnProperty(k)) { + return val + } + + // allow `--no-blah` to set 'blah' to null if null is allowed + if (val === false && ~type.indexOf(null) && + !(~type.indexOf(false) || ~type.indexOf(Boolean))) { + val = null + } + + var d = {} + d[k] = val + 
debug("prevalidated val", d, val, types[k]) + if (!validate(d, k, val, types[k], typeDefs)) { + if (exports.invalidHandler) { + exports.invalidHandler(k, val, types[k], data) + } else if (exports.invalidHandler !== false) { + debug("invalid: "+k+"="+val, types[k]) + } + return remove + } + debug("validated val", d, val, types[k]) + return d[k] + }).filter(function (val) { return val !== remove }) + + if (!val.length) delete data[k] + else if (isArray) { + debug(isArray, data[k], val) + data[k] = val + } else data[k] = val[0] + + debug("k=%s val=%j", k, val, data[k]) + }) +} + +function validateString (data, k, val) { + data[k] = String(val) +} + +function validatePath (data, k, val) { + if (val === true) return false + if (val === null) return true + + val = String(val) + + var isWin = process.platform === 'win32' + , homePattern = isWin ? /^~(\/|\\)/ : /^~\// + , home = osenv.home() + + if (home && val.match(homePattern)) { + data[k] = path.resolve(home, val.substr(2)) + } else { + data[k] = path.resolve(val) + } + return true +} + +function validateNumber (data, k, val) { + debug("validate Number %j %j %j", k, val, isNaN(val)) + if (isNaN(val)) return false + data[k] = +val +} + +function validateDate (data, k, val) { + var s = Date.parse(val) + debug("validate Date %j %j %j", k, val, s) + if (isNaN(s)) return false + data[k] = new Date(val) +} + +function validateBoolean (data, k, val) { + if (val instanceof Boolean) val = val.valueOf() + else if (typeof val === "string") { + if (!isNaN(val)) val = !!(+val) + else if (val === "null" || val === "false") val = false + else val = true + } else val = !!val + data[k] = val +} + +function validateUrl (data, k, val) { + val = url.parse(String(val)) + if (!val.host) return false + data[k] = val.href +} + +function validateStream (data, k, val) { + if (!(val instanceof Stream)) return false + data[k] = val +} + +function validate (data, k, val, type, typeDefs) { + // arrays are lists of types. + if (Array.isArray(type)) { + for (var i = 0, l = type.length; i < l; i ++) { + if (type[i] === Array) continue + if (validate(data, k, val, type[i], typeDefs)) return true + } + delete data[k] + return false + } + + // an array of anything? + if (type === Array) return true + + // NaN is poisonous. Means that something is not allowed. + if (type !== type) { + debug("Poison NaN", k, val, type) + delete data[k] + return false + } + + // explicit list of values + if (val === type) { + debug("Explicitly allowed %j", val) + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + return true + } + + // now go through the list of typeDefs, validate against each one. + var ok = false + , types = Object.keys(typeDefs) + for (var i = 0, l = types.length; i < l; i ++) { + debug("test type %j %j %j", k, val, types[i]) + var t = typeDefs[types[i]] + if (t && + ((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) { + var d = {} + ok = false !== t.validate(d, k, val) + val = d[k] + if (ok) { + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + break + } + } + } + debug("OK? 
%j (%j %j %j)", ok, k, val, types[i]) + + if (!ok) delete data[k] + return ok +} + +function parse (args, data, remain, types, shorthands) { + debug("parse", args, data, remain) + + var key = null + , abbrevs = abbrev(Object.keys(types)) + , shortAbbr = abbrev(Object.keys(shorthands)) + + for (var i = 0; i < args.length; i ++) { + var arg = args[i] + debug("arg", arg) + + if (arg.match(/^-{2,}$/)) { + // done with keys. + // the rest are args. + remain.push.apply(remain, args.slice(i + 1)) + args[i] = "--" + break + } + var hadEq = false + if (arg.charAt(0) === "-" && arg.length > 1) { + var at = arg.indexOf('=') + if (at > -1) { + hadEq = true + var v = arg.substr(at + 1) + arg = arg.substr(0, at) + args.splice(i, 1, arg, v) + } + + // see if it's a shorthand + // if so, splice and back up to re-parse it. + var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) + debug("arg=%j shRes=%j", arg, shRes) + if (shRes) { + debug(arg, shRes) + args.splice.apply(args, [i, 1].concat(shRes)) + if (arg !== shRes[0]) { + i -- + continue + } + } + arg = arg.replace(/^-+/, "") + var no = null + while (arg.toLowerCase().indexOf("no-") === 0) { + no = !no + arg = arg.substr(3) + } + + if (abbrevs[arg]) arg = abbrevs[arg] + + var argType = types[arg] + var isTypeArray = Array.isArray(argType) + if (isTypeArray && argType.length === 1) { + isTypeArray = false + argType = argType[0] + } + + var isArray = argType === Array || + isTypeArray && argType.indexOf(Array) !== -1 + + // allow unknown things to be arrays if specified multiple times. + if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) { + if (!Array.isArray(data[arg])) + data[arg] = [data[arg]] + isArray = true + } + + var val + , la = args[i + 1] + + var isBool = typeof no === 'boolean' || + argType === Boolean || + isTypeArray && argType.indexOf(Boolean) !== -1 || + (typeof argType === 'undefined' && !hadEq) || + (la === "false" && + (argType === null || + isTypeArray && ~argType.indexOf(null))) + + if (isBool) { + // just set and move along + val = !no + // however, also support --bool true or --bool false + if (la === "true" || la === "false") { + val = JSON.parse(la) + la = null + if (no) val = !val + i ++ + } + + // also support "foo":[Boolean, "bar"] and "--foo bar" + if (isTypeArray && la) { + if (~argType.indexOf(la)) { + // an explicit type + val = la + i ++ + } else if ( la === "null" && ~argType.indexOf(null) ) { + // null allowed + val = null + i ++ + } else if ( !la.match(/^-{2,}[^-]/) && + !isNaN(la) && + ~argType.indexOf(Number) ) { + // number + val = +la + i ++ + } else if ( !la.match(/^-[^-]/) && ~argType.indexOf(String) ) { + // string + val = la + i ++ + } + } + + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + continue + } + + if (argType === String) { + if (la === undefined) { + la = "" + } else if (la.match(/^-{1,2}[^-]+/)) { + la = "" + i -- + } + } + + if (la && la.match(/^-{2,}$/)) { + la = undefined + i -- + } + + val = la === undefined ? true : la + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + i ++ + continue + } + remain.push(arg) + } +} + +function resolveShort (arg, shorthands, shortAbbr, abbrevs) { + // handle single-char shorthands glommed together, like + // npm ls -glp, but only if there is one dash, and only if + // all of the chars are single-char shorthands, and it's + // not a match to some other abbrev. 
+ arg = arg.replace(/^-+/, '') + + // if it's an exact known option, then don't go any further + if (abbrevs[arg] === arg) + return null + + // if it's an exact known shortopt, same deal + if (shorthands[arg]) { + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] + } + + // first check to see if this arg is a set of single-char shorthands + var singles = shorthands.___singles + if (!singles) { + singles = Object.keys(shorthands).filter(function (s) { + return s.length === 1 + }).reduce(function (l,r) { + l[r] = true + return l + }, {}) + shorthands.___singles = singles + debug('shorthand singles', singles) + } + + var chrs = arg.split("").filter(function (c) { + return singles[c] + }) + + if (chrs.join("") === arg) return chrs.map(function (c) { + return shorthands[c] + }).reduce(function (l, r) { + return l.concat(r) + }, []) + + + // if it's an arg abbrev, and not a literal shorthand, then prefer the arg + if (abbrevs[arg] && !shorthands[arg]) + return null + + // if it's an abbr for a shorthand, then use that + if (shortAbbr[arg]) + arg = shortAbbr[arg] + + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] +} diff --git a/node_modules/node-pre-gyp/node_modules/nopt/package.json b/node_modules/node-pre-gyp/node_modules/nopt/package.json new file mode 100644 index 00000000..fae2e153 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/nopt/package.json @@ -0,0 +1,23 @@ +{ + "name": "nopt", + "version": "4.0.1", + "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.", + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "main": "lib/nopt.js", + "scripts": { + "test": "tap test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/nopt.git" + }, + "bin": "./bin/nopt.js", + "license": "ISC", + "dependencies": { + "abbrev": "1", + "osenv": "^0.1.4" + }, + "devDependencies": { + "tap": "^8.0.1" + } +} diff --git a/node_modules/node-pre-gyp/node_modules/nopt/test/basic.js b/node_modules/node-pre-gyp/node_modules/nopt/test/basic.js new file mode 100644 index 00000000..5c18ac0f --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/nopt/test/basic.js @@ -0,0 +1,303 @@ +var nopt = require("../") + , test = require('tap').test + , isWin = process.platform === 'win32' + +test("passing a string results in a string", function (t) { + var parsed = nopt({ key: String }, {}, ["--key", "myvalue"], 0) + t.same(parsed.key, "myvalue") + t.end() +}) + +// https://github.com/npm/nopt/issues/31 +test("Empty String results in empty string, not true", function (t) { + var parsed = nopt({ empty: String }, {}, ["--empty"], 0) + t.same(parsed.empty, "") + t.end() +}) + +// https://github.com/npm/nopt/issues/65 +test("Empty String should not swallow next flag", function (t) { + var parsed = nopt({ empty: String, foo: String }, {}, ["--empty", "--foo"], 0) + t.same(parsed.empty, "") + t.same(parsed.foo, "") + t.end() +}) + +// https://github.com/npm/nopt/issues/66 +test("Empty String should not be true when type is single item Array", function (t) { + var parsed = nopt({ 'foo': [String] }, {}, ["--foo"], 0) + t.same(parsed.foo, "") + t.end() +}) + +test("~ path is resolved to " + (isWin ? 
'%USERPROFILE%' : '$HOME'), function (t) { + var path = require("path") + , the + + if (isWin) { + the = { + key: 'USERPROFILE', + dir: 'C:\\temp', + val: '~\\val' + } + } else { + the = { + key: 'HOME', + dir: '/tmp', + val: '~/val' + } + } + if (!process.env[the.key]) process.env[the.key] = v.dir + var parsed = nopt({key: path}, {}, ["--key=" + the.val], 0) + t.same(parsed.key, path.resolve(process.env[the.key], "val")) + t.end() +}) + +// https://github.com/npm/nopt/issues/24 +test("Unknown options are not parsed as numbers", function (t) { + var parsed = nopt({"parse-me": Number}, null, ['--leave-as-is=1.20', '--parse-me=1.20'], 0) + t.equal(parsed['leave-as-is'], '1.20') + t.equal(parsed['parse-me'], 1.2) + t.end() +}); + +// https://github.com/npm/nopt/issues/48 +test("Check types based on name of type", function (t) { + var parsed = nopt({"parse-me": {name: "Number"}}, null, ['--parse-me=1.20'], 0) + t.equal(parsed['parse-me'], 1.2) + t.end() +}) + + +test("Missing types are not parsed", function (t) { + var parsed = nopt({"parse-me": {}}, null, ['--parse-me=1.20'], 0) + //should only contain argv + t.equal(Object.keys(parsed).length, 1) + t.end() +}) + +test("Types passed without a name are not parsed", function (t) { + var parsed = nopt({"parse-me": {}}, {}, ['--parse-me=1.20'], 0) + //should only contain argv + t.equal(Object.keys(parsed).length, 1) + t.end() +}) + +test("other tests", function (t) { + + var util = require("util") + , Stream = require("stream") + , path = require("path") + , url = require("url") + + , shorthands = + { s : ["--loglevel", "silent"] + , d : ["--loglevel", "info"] + , dd : ["--loglevel", "verbose"] + , ddd : ["--loglevel", "silly"] + , noreg : ["--no-registry"] + , reg : ["--registry"] + , "no-reg" : ["--no-registry"] + , silent : ["--loglevel", "silent"] + , verbose : ["--loglevel", "verbose"] + , h : ["--usage"] + , H : ["--usage"] + , "?" 
: ["--usage"] + , help : ["--usage"] + , v : ["--version"] + , f : ["--force"] + , desc : ["--description"] + , "no-desc" : ["--no-description"] + , "local" : ["--no-global"] + , l : ["--long"] + , p : ["--parseable"] + , porcelain : ["--parseable"] + , g : ["--global"] + } + + , types = + { aoa: Array + , nullstream: [null, Stream] + , date: Date + , str: String + , browser : String + , cache : path + , color : ["always", Boolean] + , depth : Number + , description : Boolean + , dev : Boolean + , editor : path + , force : Boolean + , global : Boolean + , globalconfig : path + , group : [String, Number] + , gzipbin : String + , logfd : [Number, Stream] + , loglevel : ["silent","win","error","warn","info","verbose","silly"] + , long : Boolean + , "node-version" : [false, String] + , npaturl : url + , npat : Boolean + , "onload-script" : [false, String] + , outfd : [Number, Stream] + , parseable : Boolean + , pre: Boolean + , prefix: path + , proxy : url + , "rebuild-bundle" : Boolean + , registry : url + , searchopts : String + , searchexclude: [null, String] + , shell : path + , t: [Array, String] + , tag : String + , tar : String + , tmp : path + , "unsafe-perm" : Boolean + , usage : Boolean + , user : String + , username : String + , userconfig : path + , version : Boolean + , viewer: path + , _exit : Boolean + , path: path + } + + ; [["-v", {version:true}, []] + ,["---v", {version:true}, []] + ,["ls -s --no-reg connect -d", + {loglevel:"info",registry:null},["ls","connect"]] + ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]] + ,["ls --registry blargle", {}, ["ls"]] + ,["--no-registry", {registry:null}, []] + ,["--no-color true", {color:false}, []] + ,["--no-color false", {color:true}, []] + ,["--no-color", {color:false}, []] + ,["--color false", {color:false}, []] + ,["--color --logfd 7", {logfd:7,color:true}, []] + ,["--color=true", {color:true}, []] + ,["--logfd=10", {logfd:10}, []] + ,["--tmp=/tmp -tar=gtar", {tmp: isWin ? 
"C:\\tmp" : "/tmp",tar:"gtar"},[]] + ,["--tmp=tmp -tar=gtar", + {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]] + ,["--logfd x", {}, []] + ,["a -true -- -no-false", {true:true},["a","-no-false"]] + ,["a -no-false", {false:false},["a"]] + ,["a -no-no-true", {true:true}, ["a"]] + ,["a -no-no-no-false", {false:false}, ["a"]] + ,["---NO-no-No-no-no-no-nO-no-no"+ + "-No-no-no-no-no-no-no-no-no"+ + "-no-no-no-no-NO-NO-no-no-no-no-no-no"+ + "-no-body-can-do-the-boogaloo-like-I-do" + ,{"body-can-do-the-boogaloo-like-I-do":false}, []] + ,["we are -no-strangers-to-love "+ + "--you-know=the-rules --and=so-do-i "+ + "---im-thinking-of=a-full-commitment "+ + "--no-you-would-get-this-from-any-other-guy "+ + "--no-gonna-give-you-up "+ + "-no-gonna-let-you-down=true "+ + "--no-no-gonna-run-around false "+ + "--desert-you=false "+ + "--make-you-cry false "+ + "--no-tell-a-lie "+ + "--no-no-and-hurt-you false" + ,{"strangers-to-love":false + ,"you-know":"the-rules" + ,"and":"so-do-i" + ,"you-would-get-this-from-any-other-guy":false + ,"gonna-give-you-up":false + ,"gonna-let-you-down":false + ,"gonna-run-around":false + ,"desert-you":false + ,"make-you-cry":false + ,"tell-a-lie":false + ,"and-hurt-you":false + },["we", "are"]] + ,["-t one -t two -t three" + ,{t: ["one", "two", "three"]} + ,[]] + ,["-t one -t null -t three four five null" + ,{t: ["one", "null", "three"]} + ,["four", "five", "null"]] + ,["-t foo" + ,{t:["foo"]} + ,[]] + ,["--no-t" + ,{t:["false"]} + ,[]] + ,["-no-no-t" + ,{t:["true"]} + ,[]] + ,["-aoa one -aoa null -aoa 100" + ,{aoa:["one", null, '100']} + ,[]] + ,["-str 100" + ,{str:"100"} + ,[]] + ,["--color always" + ,{color:"always"} + ,[]] + ,["--no-nullstream" + ,{nullstream:null} + ,[]] + ,["--nullstream false" + ,{nullstream:null} + ,[]] + ,["--notadate=2011-01-25" + ,{notadate: "2011-01-25"} + ,[]] + ,["--date 2011-01-25" + ,{date: new Date("2011-01-25")} + ,[]] + ,["-cl 1" + ,{config: true, length: 1} + ,[] + ,{config: Boolean, length: Number, clear: Boolean} + ,{c: "--config", l: "--length"}] + ,["--acount bla" + ,{"acount":true} + ,["bla"] + ,{account: Boolean, credentials: Boolean, options: String} + ,{a:"--account", c:"--credentials",o:"--options"}] + ,["--clear" + ,{clear:true} + ,[] + ,{clear:Boolean,con:Boolean,len:Boolean,exp:Boolean,add:Boolean,rep:Boolean} + ,{c:"--con",l:"--len",e:"--exp",a:"--add",r:"--rep"}] + ,["--file -" + ,{"file":"-"} + ,[] + ,{file:String} + ,{}] + ,["--file -" + ,{"file":true} + ,["-"] + ,{file:Boolean} + ,{}] + ,["--path" + ,{"path":null} + ,[]] + ,["--path ." + ,{"path":process.cwd()} + ,[]] + ].forEach(function (test) { + var argv = test[0].split(/\s+/) + , opts = test[1] + , rem = test[2] + , actual = nopt(test[3] || types, test[4] || shorthands, argv, 0) + , parsed = actual.argv + delete actual.argv + for (var i in opts) { + var e = JSON.stringify(opts[i]) + , a = JSON.stringify(actual[i] === undefined ? 
null : actual[i]) + if (e && typeof e === "object") { + t.deepEqual(e, a) + } else { + t.equal(e, a) + } + } + t.deepEqual(rem, parsed.remain) + }) + t.end() +}) diff --git a/node_modules/node-pre-gyp/package.json b/node_modules/node-pre-gyp/package.json new file mode 100644 index 00000000..7da14294 --- /dev/null +++ b/node_modules/node-pre-gyp/package.json @@ -0,0 +1,52 @@ +{ + "name": "node-pre-gyp", + "description": "Node.js native addon binary install tool", + "version": "0.12.0", + "keywords": [ + "native", + "addon", + "module", + "c", + "c++", + "bindings", + "binary" + ], + "license": "BSD-3-Clause", + "author": "Dane Springmeyer ", + "repository": { + "type": "git", + "url": "git://github.com/mapbox/node-pre-gyp.git" + }, + "bin": "./bin/node-pre-gyp", + "main": "./lib/node-pre-gyp.js", + "dependencies": { + "detect-libc": "^1.0.2", + "mkdirp": "^0.5.1", + "needle": "^2.2.1", + "nopt": "^4.0.1", + "npm-packlist": "^1.1.6", + "npmlog": "^4.0.2", + "rc": "^1.2.7", + "rimraf": "^2.6.1", + "semver": "^5.3.0", + "tar": "^4" + }, + "devDependencies": { + "aws-sdk": "^2.28.0", + "jshint": "^2.9.5", + "nock": "^9.2.3", + "tape": "^4.6.3" + }, + "jshintConfig": { + "node": true, + "globalstrict": true, + "undef": true, + "unused": false, + "noarg": true + }, + "scripts": { + "pretest": "jshint test/build.test.js test/s3_setup.test.js test/versioning.test.js test/fetch.test.js lib lib/util scripts bin/node-pre-gyp", + "update-crosswalk": "node scripts/abi_crosswalk.js", + "test": "jshint lib lib/util scripts bin/node-pre-gyp && tape test/*test.js" + } +} diff --git a/node_modules/nodemon/.jscsrc b/node_modules/nodemon/.jscsrc new file mode 100644 index 00000000..5ce64e04 --- /dev/null +++ b/node_modules/nodemon/.jscsrc @@ -0,0 +1,13 @@ +{ + "preset": "node-style-guide", + "requireCapitalizedComments": null, + "requireSpacesInAnonymousFunctionExpression": { + "beforeOpeningCurlyBrace": true, + "beforeOpeningRoundBrace": true + }, + "disallowSpacesInNamedFunctionExpression": { + "beforeOpeningRoundBrace": true + }, + "excludeFiles": ["node_modules/**"], + "disallowSpacesInFunction": null +} \ No newline at end of file diff --git a/node_modules/nodemon/.jshintrc b/node_modules/nodemon/.jshintrc new file mode 100644 index 00000000..fb991ae8 --- /dev/null +++ b/node_modules/nodemon/.jshintrc @@ -0,0 +1,16 @@ +{ + "browser": true, + "camelcase": true, + "curly": true, + "devel": true, + "eqeqeq": true, + "forin": true, + "indent": 2, + "noarg": true, + "node": true, + "quotmark": "single", + "undef": true, + "strict": false, + "unused": true +} + diff --git a/node_modules/nodemon/.travis.yml b/node_modules/nodemon/.travis.yml new file mode 100644 index 00000000..b6b03bd3 --- /dev/null +++ b/node_modules/nodemon/.travis.yml @@ -0,0 +1,19 @@ +language: node_js +cache: + directories: + - ~/.npm +notifications: + email: false +node_js: + - '11' + - '10' + - '8' + - '6' + - '4' +before_install: + - if [ "$TRAVIS_PULL_REQUEST_BRANCH" == "" ]; then echo "//registry.npmjs.org/:_authToken=\${NPM_TOKEN}" >> .npmrc; fi +after_success: + - npm run semantic-release +branches: + except: + - /^v\d+\.\d+\.\d+$/ diff --git a/node_modules/nodemon/README.md b/node_modules/nodemon/README.md new file mode 100644 index 00000000..b5b3c2b1 --- /dev/null +++ b/node_modules/nodemon/README.md @@ -0,0 +1,379 @@ +

+ [Nodemon logo image] +

+ +# nodemon + +nodemon is a tool that helps develop node.js based applications by automatically restarting the node application when file changes in the directory are detected. + +nodemon does **not** require *any* additional changes to your code or method of development. nodemon is a replacement wrapper for `node`, to use `nodemon` replace the word `node` on the command line when executing your script. + +[![NPM version](https://badge.fury.io/js/nodemon.svg)](https://npmjs.org/package/nodemon) +[![Travis Status](https://travis-ci.org/remy/nodemon.svg?branch=master)](https://travis-ci.org/remy/nodemon) [![Backers on Open Collective](https://opencollective.com/nodemon/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/nodemon/sponsors/badge.svg)](#sponsors) + +# Installation + +Either through cloning with git or by using [npm](http://npmjs.org) (the recommended way): + +```bash +npm install -g nodemon +``` + +And nodemon will be installed globally to your system path. + +You can also install nodemon as a development dependency: + +```bash +npm install --save-dev nodemon +``` + +With a local installation, nodemon will not be available in your system path. Instead, the local installation of nodemon can be run by calling it from within an npm script (such as `npm start`) or using `npx nodemon`. + +# Usage + +nodemon wraps your application, so you can pass all the arguments you would normally pass to your app: + +```bash +nodemon [your node app] +``` + +For CLI options, use the `-h` (or `--help`) argument: + +```bash +nodemon -h +``` + +Using nodemon is simple, if my application accepted a host and port as the arguments, I would start it as so: + +```bash +nodemon ./server.js localhost 8080 +``` + +Any output from this script is prefixed with `[nodemon]`, otherwise all output from your application, errors included, will be echoed out as expected. + +If no script is given, nodemon will test for a `package.json` file and if found, will run the file associated with the *main* property ([ref](https://github.com/remy/nodemon/issues/14)). + +You can also pass the `inspect` flag to node through the command line as you would normally: + +```bash +nodemon --inspect ./server.js 80 +``` + +If you have a `package.json` file for your app, you can omit the main script entirely and nodemon will read the `package.json` for the `main` property and use that value as the app. + +nodemon will also search for the `scripts.start` property in `package.json` (as of nodemon 1.1.x). + +Also check out the [FAQ](https://github.com/remy/nodemon/blob/master/faq.md) or [issues](https://github.com/remy/nodemon/issues) for nodemon. + +## Automatic re-running + +nodemon was originally written to restart hanging processes such as web servers, but now supports apps that cleanly exit. If your script exits cleanly, nodemon will continue to monitor the directory (or directories) and restart the script if there are any changes. + +## Manual restarting + +Whilst nodemon is running, if you need to manually restart your application, instead of stopping and restart nodemon, you can type `rs` with a carriage return, and nodemon will restart your process. + +## Config files + +nodemon supports local and global configuration files. These are usually named `nodemon.json` and can be located in the current working directory or in your home directory. An alternative local configuration file can be specified with the `--config ` option. 
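+
+For example, a hypothetical invocation that points nodemon at an alternate config file (the file and script names below are purely illustrative) might look like this:
+
+```bash
+nodemon --config nodemon.dev.json ./server.js
+```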
+ +The specificity is as follows, so that a command line argument will always override the config file settings: + +- command line arguments +- local config +- global config + +A config file can take any of the command line arguments as JSON key values, for example: + +```json +{ + "verbose": true, + "ignore": ["*.test.js", "fixtures/*"], + "execMap": { + "rb": "ruby", + "pde": "processing --sketch={{pwd}} --run" + } +} +``` + +The above `nodemon.json` file might be my global config so that I have support for ruby files and processing files, and I can run `nodemon demo.pde` and nodemon will automatically know how to run the script even though out of the box support for processing scripts. + +A further example of options can be seen in [sample-nodemon.md](https://github.com/remy/nodemon/blob/master/doc/sample-nodemon.md) + +### package.json + +If you want to keep all your package configurations in one place, nodemon supports using `package.json` for configuration. +Specify the config in the same format as you would for a config file but under `nodemonConfig` in the `package.json` file, for example, take the following `package.json`: + +```json +{ + "name": "nodemon", + "homepage": "http://nodemon.io", + "...": "... other standard package.json values", + "nodemonConfig": { + "ignore": ["test/*", "docs/*"], + "delay": "2500" + } +} +``` + +Note that if you specify a `--config` file or provide a local `nodemon.json` any `package.json` config is ignored. + +*This section needs better documentation, but for now you can also see `nodemon --help config` ([also here](https://github.com/remy/nodemon/blob/master/doc/cli/config.txt))*. + +## Using nodemon as a module + +Please see [doc/requireable.md](doc/requireable.md) + +## Using nodemon as child process + +Please see [doc/events.md](doc/events.md#Using_nodemon_as_child_process) + +## Running non-node scripts + +nodemon can also be used to execute and monitor other programs. nodemon will read the file extension of the script being run and monitor that extension instead of `.js` if there's no `nodemon.json`: + +```bash +nodemon --exec "python -v" ./app.py +``` + +Now nodemon will run `app.py` with python in verbose mode (note that if you're not passing args to the exec program, you don't need the quotes), and look for new or modified files with the `.py` extension. + +### Default executables + +Using the `nodemon.json` config file, you can define your own default executables using the `execMap` property. This is particularly useful if you're working with a language that isn't supported by default by nodemon. + +To add support for nodemon to know about the `.pl` extension (for Perl), the `nodemon.json` file would add: + +```json +{ + "execMap": { + "pl": "perl" + } +} +``` + +Now running the following, nodemon will know to use `perl` as the executable: + +```bash +nodemon script.pl +``` + +It's generally recommended to use the global `nodemon.json` to add your own `execMap` options. However, if there's a common default that's missing, this can be merged in to the project so that nodemon supports it by default, by changing [default.js](https://github.com/remy/nodemon/blob/master/lib/config/defaults.js) and sending a pull request. + +## Monitoring multiple directories + +By default nodemon monitors the current working directory. 
If you want to take control of that option, use the `--watch` option to add specific paths: + +```bash +nodemon --watch app --watch libs app/server.js +``` + +Now nodemon will only restart if there are changes in the `./app` or `./libs` directory. By default nodemon will traverse sub-directories, so there's no need in explicitly including sub-directories. + +Don't use unix globbing to pass multiple directories, e.g `--watch ./lib/*`, it won't work. You need a `--watch` flag per directory watched. + +## Specifying extension watch list + +By default, nodemon looks for files with the `.js`, `.mjs`, `.coffee`, `.litcoffee`, and `.json` extensions. If you use the `--exec` option and monitor `app.py` nodemon will monitor files with the extension of `.py`. However, you can specify your own list with the `-e` (or `--ext`) switch like so: + +```bash +nodemon -e js,jade +``` + +Now nodemon will restart on any changes to files in the directory (or subdirectories) with the extensions `.js`, `.jade`. + +## Ignoring files + +By default, nodemon will only restart when a `.js` JavaScript file changes. In some cases you will want to ignore some specific files, directories or file patterns, to prevent nodemon from prematurely restarting your application. + +This can be done via the command line: + +```bash +nodemon --ignore lib/ --ignore tests/ +``` + +Or specific files can be ignored: + +```bash +nodemon --ignore lib/app.js +``` + +Patterns can also be ignored (but be sure to quote the arguments): + +```bash +nodemon --ignore 'lib/*.js' +``` + +Note that by default, nodemon will ignore the `.git`, `node_modules`, `bower_components`, `.nyc_output`, `coverage` and `.sass-cache` directories and *add* your ignored patterns to the list. If you want to indeed watch a directory like `node_modules`, you need to [override the underlying default ignore rules](https://github.com/remy/nodemon/blob/master/faq.md#overriding-the-underlying-default-ignore-rules). + +## Application isn't restarting + +In some networked environments (such as a container running nodemon reading across a mounted drive), you will need to use the `legacyWatch: true` which enables Chokidar's polling. + +Via the CLI, use either `--legacy-watch` or `-L` for short: + +```bash +nodemon -L +``` + +Though this should be a last resort as it will poll every file it can find. + +## Delaying restarting + +In some situations, you may want to wait until a number of files have changed. The timeout before checking for new file changes is 1 second. If you're uploading a number of files and it's taking some number of seconds, this could cause your app to restart multiple times unnecessarily. + +To add an extra throttle, or delay restarting, use the `--delay` command: + +```bash +nodemon --delay 10 server.js +``` + +For more precision, milliseconds can be specified. Either as a float: + +```bash +nodemon --delay 2.5 server.js +``` + +Or using the time specifier (ms): + +```bash +nodemon --delay 2500ms server.js +``` + +The delay figure is number of seconds (or milliseconds, if specified) to delay before restarting. So nodemon will only restart your app the given number of seconds after the *last* file change. + +If you are setting this value in `nodemon.json`, the value will always be interpreted in milliseconds. E.g., the following are equivalent: + +```bash +nodemon --delay 2.5 + +{ + "delay": "2500" +} +``` + +## Gracefully reloading down your script + +It is possible to have nodemon send any signal that you specify to your application. 
+ +```bash +nodemon --signal SIGHUP server.js +``` + +Your application can handle the signal as follows. + +```js +process.once("SIGHUP", function () { + reloadSomeConfiguration(); +}) +``` + +Please note that nodemon will send this signal to every process in the process tree. + +If you are using `cluster`, then each workers (as well as the master) will receive the signal. If you wish to terminate all workers on receiving a `SIGHUP`, a common pattern is to catch the `SIGHUP` in the master, and forward `SIGTERM` to all workers, while ensuring that all workers ignore `SIGHUP`. + +```js +if (cluster.isMaster) { + process.on("SIGHUP", function () { + for (const worker of Object.values(cluster.workers)) { + worker.process.kill("SIGTERM"); + } + }); +} else { + process.on("SIGHUP", function() {}) +} +``` + +## Controlling shutdown of your script + +nodemon sends a kill signal to your application when it sees a file update. If you need to clean up on shutdown inside your script you can capture the kill signal and handle it yourself. + +The following example will listen once for the `SIGUSR2` signal (used by nodemon to restart), run the clean up process and then kill itself for nodemon to continue control: + +```js +process.once('SIGUSR2', function () { + gracefulShutdown(function () { + process.kill(process.pid, 'SIGUSR2'); + }); +}); +``` + +Note that the `process.kill` is *only* called once your shutdown jobs are complete. Hat tip to [Benjie Gillam](http://www.benjiegillam.com/2011/08/node-js-clean-restart-and-faster-development-with-nodemon/) for writing this technique up. + +## Triggering events when nodemon state changes + +If you want growl like notifications when nodemon restarts or to trigger an action when an event happens, then you can either `require` nodemon or add event actions to your `nodemon.json` file. + +For example, to trigger a notification on a Mac when nodemon restarts, `nodemon.json` looks like this: + +```json +{ + "events": { + "restart": "osascript -e 'display notification \"app restarted\" with title \"nodemon\"'" + } +} +``` + +A full list of available events is listed on the [event states wiki](https://github.com/remy/nodemon/wiki/Events#states). Note that you can bind to both states and messages. + +## Pipe output to somewhere else + +```js +nodemon({ + script: ..., + stdout: false // important: this tells nodemon not to output to console +}).on('readable', function() { // the `readable` event indicates that data is ready to pick up + this.stdout.pipe(fs.createWriteStream('output.txt')); + this.stderr.pipe(fs.createWriteStream('err.txt')); +}); +``` + +## Using nodemon in your gulp workflow + +Check out the [gulp-nodemon](https://github.com/JacksonGariety/gulp-nodemon) plugin to integrate nodemon with the rest of your project's gulp workflow. + +## Using nodemon in your Grunt workflow + +Check out the [grunt-nodemon](https://github.com/ChrisWren/grunt-nodemon) plugin to integrate nodemon with the rest of your project's grunt workflow. + +## Pronunciation + +> nodemon, is it pronounced: node-mon, no-demon or node-e-mon (like pokémon)? + +Well...I've been asked this many times before. I like that I've been asked this before. There's been bets as to which one it actually is. + +The answer is simple, but possibly frustrating. I'm not saying (how I pronounce it). It's up to you to call it as you like. 
All answers are correct :) + +## Design principles + +- Less flags is better +- Works across all platforms +- Less features +- Let individuals build on top of nodemon +- Offer all CLI functionality as an API +- Contributions must have and pass tests + +Nodemon is not perfect, and CLI arguments has sprawled beyond where I'm completely happy, but perhaps it can be reduced a little one day. + +## FAQ + +See the [FAQ](https://github.com/remy/nodemon/blob/master/faq.md) and please add your own questions if you think they would help others. + +## Backers + +Thank you to all [our backers](https://opencollective.com/nodemon#backer)! 🙏 + +[![nodemon backers](https://opencollective.com/nodemon/backers.svg?width=890)](https://opencollective.com/nodemon#backers) + +## Sponsors + +Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [Sponsor this project today ❤️](https://opencollective.com/nodemon#sponsor) + +[](https://sparkpo.st/nodemon) + +[](https://mixmax.com) + +# License + +MIT [http://rem.mit-license.org](http://rem.mit-license.org) diff --git a/node_modules/nodemon/bin/nodemon.js b/node_modules/nodemon/bin/nodemon.js new file mode 100755 index 00000000..31aff345 --- /dev/null +++ b/node_modules/nodemon/bin/nodemon.js @@ -0,0 +1,16 @@ +#!/usr/bin/env node + +const cli = require('../lib/cli'); +const nodemon = require('../lib/'); +const options = cli.parse(process.argv); + +nodemon(options); + +const fs = require('fs'); + +// checks for available update and returns an instance +const pkg = JSON.parse(fs.readFileSync(__dirname + '/../package.json')); + +if (pkg.version.indexOf('0.0.0') !== 0 && options.noUpdateNotifier !== true) { + require('update-notifier')({ pkg }).notify(); +} diff --git a/node_modules/nodemon/bin/postinstall.js b/node_modules/nodemon/bin/postinstall.js new file mode 100755 index 00000000..70d2a255 --- /dev/null +++ b/node_modules/nodemon/bin/postinstall.js @@ -0,0 +1,29 @@ +#!/usr/bin/env node + +function main() { + if (process.env.SUPPRESS_SUPPORT) { + return; + } + + try { + const Configstore = require('configstore'); + const pkg = require(__dirname + '/../package.json'); + const now = Date.now(); + + var week = 1000 * 60 * 60 * 24 * 7; + + // create a Configstore instance with an unique ID e.g. + // Package name and optionally some default values + const conf = new Configstore(pkg.name); + const last = conf.get('lastCheck'); + + if (!last || now - week > last) { + console.log('\u001b[32mLove nodemon? You can now support the project via the open collective:\u001b[22m\u001b[39m\n > \u001b[96m\u001b[1mhttps://opencollective.com/nodemon/donate\u001b[0m\n'); + conf.set('lastCheck', now); + } + } catch (e) { + console.log('\u001b[32mLove nodemon? 
You can now support the project via the open collective:\u001b[22m\u001b[39m\n > \u001b[96m\u001b[1mhttps://opencollective.com/nodemon/donate\u001b[0m\n'); + } +} + +main(); diff --git a/node_modules/nodemon/commitlint.config.js b/node_modules/nodemon/commitlint.config.js new file mode 100644 index 00000000..39955d64 --- /dev/null +++ b/node_modules/nodemon/commitlint.config.js @@ -0,0 +1,7 @@ +module.exports = { + rules: { + 'body-tense': [0, 'never', 0], + lang: 'eng', + }, + extends: ['@commitlint/config-angular'], +}; diff --git a/node_modules/nodemon/doc/cli/authors.txt b/node_modules/nodemon/doc/cli/authors.txt new file mode 100644 index 00000000..6c77a12a --- /dev/null +++ b/node_modules/nodemon/doc/cli/authors.txt @@ -0,0 +1,8 @@ + + Remy Sharp - author and maintainer + https://github.com/remy + https://twitter.com/rem + + Contributors: https://github.com/remy/nodemon/graphs/contributors ❤︎ + + Please help make nodemon better: https://github.com/remy/nodemon/ diff --git a/node_modules/nodemon/doc/cli/config.txt b/node_modules/nodemon/doc/cli/config.txt new file mode 100644 index 00000000..5de9bba5 --- /dev/null +++ b/node_modules/nodemon/doc/cli/config.txt @@ -0,0 +1,44 @@ + + Typically the options to control nodemon are passed in via the CLI and are + listed under: nodemon --help options + + nodemon can also be configured via a local and global config file: + + * $HOME/nodemon.json + * $PWD/nodemon.json OR --config + * nodemonConfig in package.json + + All config options in the .json file map 1-to-1 with the CLI options, so a + config could read as: + + { + "ext": "*.pde", + "verbose": true, + "exec": "processing --sketch=game --run" + } + + There are a limited number of variables available in the config (since you + could use backticks on the CLI to use a variable, backticks won't work in + the .json config). + + * {{pwd}} - the current directory + * {{filename}} - the filename you pass to nodemon + + For example: + + { + "ext": "*.pde", + "verbose": true, + "exec": "processing --sketch={{pwd}} --run" + } + + The global config file is useful for setting up default executables + instead of repeating the same option in each of your local configs: + + { + "verbose": true, + "execMap": { + "rb": "ruby", + "pde": "processing --sketch={{pwd}} --run" + } + } diff --git a/node_modules/nodemon/doc/cli/help.txt b/node_modules/nodemon/doc/cli/help.txt new file mode 100644 index 00000000..dd2ab811 --- /dev/null +++ b/node_modules/nodemon/doc/cli/help.txt @@ -0,0 +1,29 @@ + Usage: nodemon [options] [script.js] [args] + + Options: + + --config file ............ alternate nodemon.json config file to use + -e, --ext ................ extensions to look for, ie. js,jade,hbs. + -x, --exec app ........... execute script with "app", ie. -x "python -v". + -w, --watch path.......... watch directory "path" or files. use once for + each directory or file to watch. + -i, --ignore ............. ignore specific files or directories. + -V, --verbose ............ show detail on what is causing restarts. + -- ........... to tell nodemon stop slurping arguments. + + Note: if the script is omitted, nodemon will try to read "main" from + package.json and without a nodemon.json, nodemon will monitor .js, .mjs, .coffee, + .litcoffee, and .json by default. 
+ + For advanced nodemon configuration use nodemon.json: nodemon --help config + See also the sample: https://github.com/remy/nodemon/wiki/Sample-nodemon.json + + Examples: + + $ nodemon server.js + $ nodemon -w ../foo server.js apparg1 apparg2 + $ nodemon --exec python app.py + $ nodemon --exec "make build" -e "styl hbs" + $ nodemon app.js -- --config # pass config to app.js + + \x1B[1mAll options are documented under: \x1B[4mnodemon --help options\x1B[0m diff --git a/node_modules/nodemon/doc/cli/logo.txt b/node_modules/nodemon/doc/cli/logo.txt new file mode 100644 index 00000000..150f97f5 --- /dev/null +++ b/node_modules/nodemon/doc/cli/logo.txt @@ -0,0 +1,20 @@ + ; ; + kO. x0 + KMX, .:x0kc. 'KMN + 0MMM0: 'oKMMMMMMMXd, ;OMMMX + oMMMMMWKOONMMMMMMMMMMMMMWOOKWMMMMMx + OMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMK. + .oWMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMd. + KMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMN + KMMMMMMMMMMMMMMW0k0WMMMMMMMMMMMMMMW + KMMMMMMMMMMMNk:. :xNMMMMMMMMMMMW + KMMMMMMMMMMK OMMMMMMMMMMW + KMMMMMMMMMMO xMMMMMMMMMMN + KMMMMMMMMMMO xMMMMMMMMMMN + KMMMMMMMMMMO xMMMMMMMMMMN + KMMMMMMMMMMO xMMMMMMMMMMN + KMMMMMMMMMMO xMMMMMMMMMMN + KMMMMMMMMMNc ;NMMMMMMMMMN + KMMMMMW0o' .lOWMMMMMN + KMMKd; ,oKMMN + kX: ,K0 \ No newline at end of file diff --git a/node_modules/nodemon/doc/cli/options.txt b/node_modules/nodemon/doc/cli/options.txt new file mode 100644 index 00000000..70f41c4a --- /dev/null +++ b/node_modules/nodemon/doc/cli/options.txt @@ -0,0 +1,36 @@ + +Configuration + --config .......... alternate nodemon.json config file to use + --exitcrash .............. exit on crash, allows nodemon to work with other watchers + -i, --ignore ............. ignore specific files or directories + --no-colors .............. disable color output + --signal ........ use specified kill signal instead of default (ex. SIGTERM) + -w, --watch dir........... watch directory "dir" or files. use once for each + directory or file to watch + --no-update-notifier ..... opt-out of update version check + +Execution + -C, --on-change-only ..... execute script on change only, not startup + --cwd .............. change into before running the script + -e, --ext ................ extensions to look for, ie. "js,jade,hbs" + -I, --no-stdin ........... nodemon passes stdin directly to child process + --spawn .................. force nodemon to use spawn (over fork) [node only] + -x, --exec app ........... execute script with "app", ie. -x "python -v" + -- ........... to tell nodemon stop slurping arguments + +Watching + -d, --delay n ............ debounce restart for "n" seconds + -L, --legacy-watch ....... use polling to watch for changes (typically needed + when watching over a network/Docker) + -P, --polling-interval ... combined with -L, milliseconds to poll for (default 100) + +Information + --dump ................... print full debug configuration + -h, --help ............... default help + --help ........... help on a specific feature. Try "--help topics" + -q, --quiet .............. minimise nodemon messages to start/stop only + -v, --version ............ current nodemon version + -V, --verbose ............ show detail on what is causing restarts + + +> Note that any unrecognised arguments are passed to the executing command. diff --git a/node_modules/nodemon/doc/cli/topics.txt b/node_modules/nodemon/doc/cli/topics.txt new file mode 100644 index 00000000..9fe3e2b5 --- /dev/null +++ b/node_modules/nodemon/doc/cli/topics.txt @@ -0,0 +1,8 @@ + + options .................. show all available nodemon options + config ................... 
default config options using nodemon.json + authors .................. contributors to this project + logo ..................... <3 + whoami ................... I, AM, NODEMON \o/ + + Please support https://github.com/remy/nodemon/ diff --git a/node_modules/nodemon/doc/cli/usage.txt b/node_modules/nodemon/doc/cli/usage.txt new file mode 100644 index 00000000..bca98b5e --- /dev/null +++ b/node_modules/nodemon/doc/cli/usage.txt @@ -0,0 +1,3 @@ + Usage: nodemon [nodemon options] [script.js] [args] + + See "nodemon --help" for more. diff --git a/node_modules/nodemon/doc/cli/whoami.txt b/node_modules/nodemon/doc/cli/whoami.txt new file mode 100644 index 00000000..efc3382e --- /dev/null +++ b/node_modules/nodemon/doc/cli/whoami.txt @@ -0,0 +1,9 @@ +__/\\\\\_____/\\\_______/\\\\\_______/\\\\\\\\\\\\_____/\\\\\\\\\\\\\\\__/\\\\____________/\\\\_______/\\\\\_______/\\\\\_____/\\\_ + _\/\\\\\\___\/\\\_____/\\\///\\\____\/\\\////////\\\__\/\\\///////////__\/\\\\\\________/\\\\\\_____/\\\///\\\____\/\\\\\\___\/\\\_ + _\/\\\/\\\__\/\\\___/\\\/__\///\\\__\/\\\______\//\\\_\/\\\_____________\/\\\//\\\____/\\\//\\\___/\\\/__\///\\\__\/\\\/\\\__\/\\\_ + _\/\\\//\\\_\/\\\__/\\\______\//\\\_\/\\\_______\/\\\_\/\\\\\\\\\\\_____\/\\\\///\\\/\\\/_\/\\\__/\\\______\//\\\_\/\\\//\\\_\/\\\_ + _\/\\\\//\\\\/\\\_\/\\\_______\/\\\_\/\\\_______\/\\\_\/\\\///////______\/\\\__\///\\\/___\/\\\_\/\\\_______\/\\\_\/\\\\//\\\\/\\\_ + _\/\\\_\//\\\/\\\_\//\\\______/\\\__\/\\\_______\/\\\_\/\\\_____________\/\\\____\///_____\/\\\_\//\\\______/\\\__\/\\\_\//\\\/\\\_ + _\/\\\__\//\\\\\\__\///\\\__/\\\____\/\\\_______/\\\__\/\\\_____________\/\\\_____________\/\\\__\///\\\__/\\\____\/\\\__\//\\\\\\_ + _\/\\\___\//\\\\\____\///\\\\\/_____\/\\\\\\\\\\\\/___\/\\\\\\\\\\\\\\\_\/\\\_____________\/\\\____\///\\\\\/_____\/\\\___\//\\\\\_ + _\///_____\/////_______\/////_______\////////////_____\///////////////__\///______________\///_______\/////_______\///_____\/////__ \ No newline at end of file diff --git a/node_modules/nodemon/lib/cli/index.js b/node_modules/nodemon/lib/cli/index.js new file mode 100644 index 00000000..bf9e8099 --- /dev/null +++ b/node_modules/nodemon/lib/cli/index.js @@ -0,0 +1,49 @@ +var parse = require('./parse'); + +/** + * Converts a string to command line args, in particular + * groups together quoted values. + * This is a utility function to allow calling nodemon as a required + * library, but with the CLI args passed in (instead of an object). 
+ * + * @param {String} string + * @return {Array} + */ +function stringToArgs(string) { + var args = []; + + var parts = string.split(' '); + var length = parts.length; + var i = 0; + var open = false; + var grouped = ''; + var lead = ''; + + for (; i < length; i++) { + lead = parts[i].substring(0, 1); + if (lead === '"' || lead === '\'') { + open = lead; + grouped = parts[i].substring(1); + } else if (open && parts[i].slice(-1) === open) { + open = false; + grouped += ' ' + parts[i].slice(0, -1); + args.push(grouped); + } else if (open) { + grouped += ' ' + parts[i]; + } else { + args.push(parts[i]); + } + } + + return args; +} + +module.exports = { + parse: function (argv) { + if (typeof argv === 'string') { + argv = stringToArgs(argv); + } + + return parse(argv); + }, +}; \ No newline at end of file diff --git a/node_modules/nodemon/lib/cli/parse.js b/node_modules/nodemon/lib/cli/parse.js new file mode 100644 index 00000000..ad740038 --- /dev/null +++ b/node_modules/nodemon/lib/cli/parse.js @@ -0,0 +1,230 @@ +/* + +nodemon is a utility for node, and replaces the use of the executable +node. So the user calls `nodemon foo.js` instead. + +nodemon can be run in a number of ways: + +`nodemon` - tries to use package.json#main property to run +`nodemon` - if no package, looks for index.js +`nodemon app.js` - runs app.js +`nodemon --arg app.js --apparg` - eats arg1, and runs app.js with apparg +`nodemon --apparg` - as above, but passes apparg to package.json#main (or + index.js) +`nodemon --debug app.js + +*/ + +var fs = require('fs'); +var path = require('path'); +var existsSync = fs.existsSync || path.existsSync; + +module.exports = parse; + +/** + * Parses the command line arguments `process.argv` and returns the + * nodemon options, the user script and the executable script. + * + * @param {Array} full process arguments, including `node` leading arg + * @return {Object} { options, script, args } + */ +function parse(argv) { + if (typeof argv === 'string') { + argv = argv.split(' '); + } + + var eat = function (i, args) { + if (i <= args.length) { + return args.splice(i + 1, 1).pop(); + } + }; + + var args = argv.slice(2); + var script = null; + var nodemonOptions = { scriptPosition: null }; + + var nodemonOpt = nodemonOption.bind(null, nodemonOptions); + var lookForArgs = true; + + // move forward through the arguments + for (var i = 0; i < args.length; i++) { + // if the argument looks like a file, then stop eating + if (!script) { + if (args[i] === '.' || existsSync(args[i])) { + script = args.splice(i, 1).pop(); + + // we capture the position of the script because we'll reinsert it in + // the right place in run.js:command (though I'm not sure we should even + // take it out of the array in the first place, but this solves passing + // arguments to the exec process for now). + nodemonOptions.scriptPosition = i; + i--; + continue; + } + } + + if (lookForArgs) { + // respect the standard way of saying: hereafter belongs to my script + if (args[i] === '--') { + args.splice(i, 1); + nodemonOptions.scriptPosition = i; + // cycle back one argument, as we just ate this one up + i--; + + // ignore all further nodemon arguments + lookForArgs = false; + + // move to the next iteration + continue; + } + + if (nodemonOpt(args[i], eat.bind(null, i, args)) !== false) { + args.splice(i, 1); + // cycle back one argument, as we just ate this one up + i--; + } + } + } + + nodemonOptions.script = script; + nodemonOptions.args = args; + + return nodemonOptions; +} + + +/** + * Given an argument (ie. 
from process.argv), sets nodemon + * options and can eat up the argument value + * + * @param {Object} options object that will be updated + * @param {Sting} current argument from argv + * @param {Function} the callback to eat up the next argument in argv + * @return {Boolean} false if argument was not a nodemon arg + */ +function nodemonOption(options, arg, eatNext) { + // line separation on purpose to help legibility + if (arg === '--help' || arg === '-h' || arg === '-?') { + var help = eatNext(); + options.help = help ? help : true; + } else + + if (arg === '--version' || arg === '-v') { + options.version = true; + } else + + if (arg === '--no-update-notifier') { + options.noUpdateNotifier = true; + } else + + if (arg === '--spawn') { + options.spawn = true; + } else + + if (arg === '--dump') { + options.dump = true; + } else + + if (arg === '--verbose' || arg === '-V') { + options.verbose = true; + } else + + if (arg === '--legacy-watch' || arg === '-L') { + options.legacyWatch = true; + } else + + if (arg === '--polling-interval' || arg === '-P') { + options.pollingInterval = parseInt(eatNext(), 10); + } else + + // Depricated as this is "on" by default + if (arg === '--js') { + options.js = true; + } else + + if (arg === '--quiet' || arg === '-q') { + options.quiet = true; + } else + + if (arg === '--config') { + options.configFile = eatNext(); + } else + + if (arg === '--watch' || arg === '-w') { + if (!options.watch) { options.watch = []; } + options.watch.push(eatNext()); + } else + + if (arg === '--ignore' || arg === '-i') { + if (!options.ignore) { options.ignore = []; } + options.ignore.push(eatNext()); + } else + + if (arg === '--exitcrash') { + options.exitcrash = true; + } else + + if (arg === '--delay' || arg === '-d') { + options.delay = parseDelay(eatNext()); + } else + + if (arg === '--exec' || arg === '-x') { + options.exec = eatNext(); + } else + + if (arg === '--no-stdin' || arg === '-I') { + options.stdin = false; + } else + + if (arg === '--on-change-only' || arg === '-C') { + options.runOnChangeOnly = true; + } else + + if (arg === '--ext' || arg === '-e') { + options.ext = eatNext(); + } else + + if (arg === '--no-colours' || arg === '--no-colors') { + options.colours = false; + } else + + if (arg === '--signal' || arg === '-s') { + options.signal = eatNext(); + } else + + if (arg === '--cwd') { + options.cwd = eatNext(); + + // go ahead and change directory. This is primarily for nodemon tools like + // grunt-nodemon - we're doing this early because it will affect where the + // user script is searched for. + process.chdir(path.resolve(options.cwd)); + } else { + + // this means we didn't match + return false; + } +} + +/** + * Given an argument (ie. from nodemonOption()), will parse and return the + * equivalent millisecond value or 0 if the argument cannot be parsed + * + * @param {String} argument value given to the --delay option + * @return {Number} millisecond equivalent of the argument + */ +function parseDelay(value) { + var millisPerSecond = 1000; + var millis = 0; + + if (value.match(/^\d*ms$/)) { + // Explicitly parse for milliseconds when using ms time specifier + millis = parseInt(value, 10); + } else { + // Otherwise, parse for seconds, with or without time specifier then convert + millis = parseFloat(value) * millisPerSecond; + } + + return isNaN(millis) ? 
0 : millis; +} + diff --git a/node_modules/nodemon/lib/config/command.js b/node_modules/nodemon/lib/config/command.js new file mode 100644 index 00000000..9839b5c7 --- /dev/null +++ b/node_modules/nodemon/lib/config/command.js @@ -0,0 +1,43 @@ +module.exports = command; + +/** + * command constructs the executable command to run in a shell including the + * user script, the command arguments. + * + * @param {Object} settings Object as: + * { execOptions: { + * exec: String, + * [script: String], + * [scriptPosition: Number], + * [execArgs: Array] + * } + * } + * @return {Object} an object with the node executable and the + * arguments to the command + */ +function command(settings) { + var options = settings.execOptions; + var executable = options.exec; + var args = []; + + // after "executable" go the exec args (like --debug, etc) + if (options.execArgs) { + [].push.apply(args, options.execArgs); + } + + // then goes the user's script arguments + if (options.args) { + [].push.apply(args, options.args); + } + + // after the "executable" goes the user's script + if (options.script) { + args.splice((options.scriptPosition || 0) + + options.execArgs.length, 0, options.script); + } + + return { + executable: executable, + args: args, + }; +} diff --git a/node_modules/nodemon/lib/config/defaults.js b/node_modules/nodemon/lib/config/defaults.js new file mode 100644 index 00000000..e2a448b4 --- /dev/null +++ b/node_modules/nodemon/lib/config/defaults.js @@ -0,0 +1,28 @@ +var ignoreRoot = require('ignore-by-default').directories(); + +// default options for config.options +module.exports = { + restartable: 'rs', + colours: true, + execMap: { + py: 'python', + rb: 'ruby', + ts: 'ts-node', + // more can be added here such as ls: lsc - but please ensure it's cross + // compatible with linux, mac and windows, or make the default.js + // dynamically append the `.cmd` for node based utilities + }, + ignoreRoot: ignoreRoot.map(_ => `**/${_}/**`), + watch: ['*.*'], + stdin: true, + runOnChangeOnly: false, + verbose: false, + signal: 'SIGUSR2', + // 'stdout' refers to the default behaviour of a required nodemon's child, + // but also includes stderr. If this is false, data is still dispatched via + // nodemon.on('stdout/stderr') + stdout: true, + watchOptions: { + + }, +}; diff --git a/node_modules/nodemon/lib/config/exec.js b/node_modules/nodemon/lib/config/exec.js new file mode 100644 index 00000000..951ac7a1 --- /dev/null +++ b/node_modules/nodemon/lib/config/exec.js @@ -0,0 +1,225 @@ +const path = require('path'); +const fs = require('fs'); +const existsSync = fs.existsSync; +const utils = require('../utils'); + +module.exports = exec; +module.exports.expandScript = expandScript; + +/** + * Reads the cwd/package.json file and looks to see if it can load a script + * and possibly an exec first from package.main, then package.start. + * + * @return {Object} exec & script if found + */ +function execFromPackage() { + // doing a try/catch because we can't use the path.exist callback pattern + // or we could, but the code would get messy, so this will do exactly + // what we're after - if the file doesn't exist, it'll throw. 
+ try { + // note: this isn't nodemon's package, it's the user's cwd package + var pkg = require(path.join(process.cwd(), 'package.json')); + if (pkg.main !== undefined) { + // no app found to run - so give them a tip and get the feck out + return { exec: null, script: pkg.main }; + } + + if (pkg.scripts && pkg.scripts.start) { + return { exec: pkg.scripts.start }; + } + } catch (e) { } + + return null; +} + +function replace(map, str) { + var re = new RegExp('{{(' + Object.keys(map).join('|') + ')}}', 'g'); + return str.replace(re, function (all, m) { + return map[m] || all || ''; + }); +} + +function expandScript(script, ext) { + if (!ext) { + ext = '.js'; + } + if (script.indexOf(ext) !== -1) { + return script; + } + + if (existsSync(path.resolve(script))) { + return script; + } + + if (existsSync(path.resolve(script + ext))) { + return script + ext; + } + + return script; +} + +/** + * Discovers all the options required to run the script + * and if a custom exec has been passed in, then it will + * also try to work out what extensions to monitor and + * whether there's a special way of running that script. + * + * @param {Object} nodemonOptions + * @param {Object} execMap + * @return {Object} new and updated version of nodemonOptions + */ +function exec(nodemonOptions, execMap) { + if (!execMap) { + execMap = {}; + } + + var options = utils.clone(nodemonOptions || {}); + var script; + + // if there's no script passed, try to get it from the first argument + if (!options.script && (options.args || []).length) { + script = expandScript(options.args[0], + options.ext && ('.' + (options.ext || 'js').split(',')[0])); + + // if the script was found, shift it off our args + if (script !== options.args[0]) { + options.script = script; + options.args.shift(); + } + } + + // if there's no exec found yet, then try to read it from the local + // package.json this logic used to sit in the cli/parse, but actually the cli + // should be parsed first, then the user options (via nodemon.json) then + // finally default down to pot shots at the directory via package.json + if (!options.exec && !options.script) { + var found = execFromPackage(); + if (found !== null) { + if (found.exec) { + options.exec = found.exec; + } + if (!options.script) { + options.script = found.script; + } + if (Array.isArray(options.args) && + options.scriptPosition === null) { + options.scriptPosition = options.args.length; + } + } + } + + // var options = utils.clone(nodemonOptions || {}); + script = path.basename(options.script || ''); + + var scriptExt = path.extname(script).slice(1); + + var extension = options.ext; + if (extension === undefined) { + var isJS = scriptExt === 'js' || scriptExt === 'mjs'; + extension = (isJS || !scriptExt) ? 
'js,mjs' : scriptExt; + extension += ',json'; // Always watch JSON files + } + + var execDefined = !!options.exec; + + // allows the user to simplify cli usage: + // https://github.com/remy/nodemon/issues/195 + // but always give preference to the user defined argument + if (!options.exec && execMap[scriptExt] !== undefined) { + options.exec = execMap[scriptExt]; + execDefined = true; + } + + options.execArgs = nodemonOptions.execArgs || []; + + if (Array.isArray(options.exec)) { + options.execArgs = options.exec; + options.exec = options.execArgs.shift(); + } + + if (options.exec === undefined) { + options.exec = 'node'; + } else { + // allow variable substitution for {{filename}} and {{pwd}} + var substitution = replace.bind(null, { + filename: options.script, + pwd: process.cwd(), + }); + + var newExec = substitution(options.exec); + if (newExec !== options.exec && + options.exec.indexOf('{{filename}}') !== -1) { + options.script = null; + } + options.exec = newExec; + + var newExecArgs = options.execArgs.map(substitution); + if (newExecArgs.join('') !== options.execArgs.join('')) { + options.execArgs = newExecArgs; + delete options.script; + } + } + + + if (options.exec === 'node' && options.nodeArgs && options.nodeArgs.length) { + options.execArgs = options.execArgs.concat(options.nodeArgs); + } + + // note: indexOf('coffee') handles both .coffee and .litcoffee + if (!execDefined && options.exec === 'node' && + scriptExt.indexOf('coffee') !== -1) { + options.exec = 'coffee'; + + // we need to get execArgs set before the script + // for example, in `nodemon --debug my-script.coffee --my-flag`, debug is an + // execArg, while my-flag is a script arg + var leadingArgs = (options.args || []).splice(0, options.scriptPosition); + options.execArgs = options.execArgs.concat(leadingArgs); + options.scriptPosition = 0; + + if (options.execArgs.length > 0) { + // because this is the coffee executable, we need to combine the exec args + // into a single argument after the nodejs flag + options.execArgs = ['--nodejs', options.execArgs.join(' ')]; + } + } + + if (options.exec === 'coffee') { + // don't override user specified extension tracking + if (options.ext === undefined) { + if (extension) { extension += ','; } + extension += 'coffee,litcoffee'; + } + + // because windows can't find 'coffee', it needs the real file 'coffee.cmd' + if (utils.isWindows) { + options.exec += '.cmd'; + } + } + + // allow users to make a mistake on the extension to monitor + // converts .js, jade => js,jade + // BIG NOTE: user can't do this: nodemon -e *.js + // because the terminal will automatically expand the glob against + // the file system :( + extension = (extension.match(/[^,*\s]+/g) || []) + .map(ext => ext.replace(/^\./, '')) + .join(','); + + options.ext = extension; + + if (options.script) { + options.script = expandScript(options.script, + extension && ('.' 
+ extension.split(',')[0])); + } + + options.env = {}; + // make sure it's an object (and since we don't have ) + if (({}).toString.apply(nodemonOptions.env) === '[object Object]') { + options.env = utils.clone(nodemonOptions.env); + } else if (nodemonOptions.env !== undefined) { + throw new Error('nodemon env values must be an object: { PORT: 8000 }'); + } + + return options; +} diff --git a/node_modules/nodemon/lib/config/index.js b/node_modules/nodemon/lib/config/index.js new file mode 100644 index 00000000..e1f584dc --- /dev/null +++ b/node_modules/nodemon/lib/config/index.js @@ -0,0 +1,93 @@ +/** + * Manages the internal config of nodemon, checking for the state of support + * with fs.watch, how nodemon can watch files (using find or fs methods). + * + * This is *not* the user's config. + */ +var debug = require('debug')('nodemon'); +var load = require('./load'); +var rules = require('../rules'); +var utils = require('../utils'); +var pinVersion = require('../version').pin; +var command = require('./command'); +var rulesToMonitor = require('../monitor/match').rulesToMonitor; +var bus = utils.bus; + +function reset() { + rules.reset(); + + config.dirs = []; + config.options = { ignore: [], watch: [] }; + config.lastStarted = 0; + config.loaded = []; +} + +var config = { + run: false, + system: { + cwd: process.cwd(), + }, + required: false, + dirs: [], + timeout: 1000, + options: {}, +}; + +/** + * Take user defined settings, then detect the local machine capability, then + * look for local and global nodemon.json files and merge together the final + * settings with the config for nodemon. + * + * @param {Object} settings user defined settings for nodemon (typically on + * the cli) + * @param {Function} ready callback fired once the config is loaded + */ +config.load = function (settings, ready) { + reset(); + var config = this; + load(settings, config.options, config, function (options) { + config.options = options; + + if (options.watch.length === 0) { + // this is to catch when the watch is left blank + options.watch.push('*.*'); + } + + if (options['watch_interval']) { // jshint ignore:line + options.watchInterval = options['watch_interval']; // jshint ignore:line + } + + config.watchInterval = options.watchInterval || null; + if (options.signal) { + config.signal = options.signal; + } + + var cmd = command(config.options); + config.command = { + raw: cmd, + string: utils.stringify(cmd.executable, cmd.args), + }; + + // now run automatic checks on system adding to the config object + options.monitor = rulesToMonitor(options.watch, options.ignore, config); + + var cwd = process.cwd(); + debug('config: dirs', config.dirs); + if (config.dirs.length === 0) { + config.dirs.unshift(cwd); + } + + bus.emit('config:update', config); + pinVersion().then(function () { + ready(config); + }).catch(e => { + // this doesn't help testing, but does give exposure on syntax errors + console.error(e.stack); + setTimeout(() => { throw e; }, 0); + }); + }); +}; + +config.reset = reset; + +module.exports = config; diff --git a/node_modules/nodemon/lib/config/load.js b/node_modules/nodemon/lib/config/load.js new file mode 100644 index 00000000..ddec54fd --- /dev/null +++ b/node_modules/nodemon/lib/config/load.js @@ -0,0 +1,254 @@ +var debug = require('debug')('nodemon'); +var fs = require('fs'); +var path = require('path'); +var exists = fs.exists || path.exists; +var utils = require('../utils'); +var rules = require('../rules'); +var parse = require('../rules/parse'); +var exec = require('./exec'); +var 
defaults = require('./defaults'); + +module.exports = load; +module.exports.mutateExecOptions = mutateExecOptions; + +var existsSync = fs.existsSync || path.existsSync; + +function findAppScript() { + // nodemon has been run alone, so try to read the package file + // or try to read the index.js file + if (existsSync('./index.js')) { + return 'index.js'; + } +} + +/** + * Load the nodemon config, first reading the global root/nodemon.json, then + * the local nodemon.json to the exec and then overwriting using any user + * specified settings (i.e. from the cli) + * + * @param {Object} settings user defined settings + * @param {Function} ready callback that receives complete config + */ +function load(settings, options, config, callback) { + config.loaded = []; + // first load the root nodemon.json + loadFile(options, config, utils.home, function (options) { + // then load the user's local configuration file + if (settings.configFile) { + options.configFile = path.resolve(settings.configFile); + } + loadFile(options, config, process.cwd(), function (options) { + // Then merge over with the user settings (parsed from the cli). + // Note that merge protects and favours existing values over new values, + // and thus command line arguments get priority + options = utils.merge(settings, options); + + // legacy support + if (!Array.isArray(options.ignore)) { + options.ignore = [options.ignore]; + } + + if (!options.ignoreRoot) { + options.ignoreRoot = defaults.ignoreRoot; + } + + // blend the user ignore and the default ignore together + if (options.ignoreRoot && options.ignore) { + if (!Array.isArray(options.ignoreRoot)) { + options.ignoreRoot = [options.ignoreRoot]; + } + options.ignore = options.ignoreRoot.concat(options.ignore); + } else { + options.ignore = defaults.ignore.concat(options.ignore); + } + + + // add in any missing defaults + options = utils.merge(options, defaults); + + if (!options.script && !options.exec) { + var found = findAppScript(); + if (found) { + if (!options.args) { + options.args = []; + } + // if the script is found as a result of not being on the command + // line, then we move any of the pre double-dash args in execArgs + const n = options.scriptPosition || options.args.length; + options.execArgs = (options.execArgs || []) + .concat(options.args.splice(0, n)); + options.scriptPosition = null; + + options.script = found; + } + } + + mutateExecOptions(options); + + if (options.quiet) { + utils.quiet(); + } + + if (options.verbose) { + utils.debug = true; + } + + // simplify the ready callback to be called after the rules are normalised + // from strings to regexp through the rules lib. Note that this gets + // created *after* options is overwritten twice in the lines above. 
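// --- Editor's note (illustrative only, not part of nodemon): the net effect of
// the merging above is this precedence, lowest to highest:
//   defaults.js < global ~/nodemon.json < local nodemon.json / package.json
//   "nodemonConfig" < CLI (or API) settings.
// nodemon's own utils.merge works by filling missing keys in from the
// lower-priority side; the plain-object sketch below (hypothetical values)
// shows the same outcome using Object.assign.
const layers = [
  { ext: 'js,mjs,json', verbose: false },   // defaults
  { verbose: true },                        // global ~/nodemon.json
  { ext: 'js,ts' },                         // local nodemon.json
  { watch: ['src'] },                       // CLI / API settings
];
const effective = layers.reduce((acc, layer) => Object.assign(acc, layer), {});
console.log(effective); // { ext: 'js,ts', verbose: true, watch: [ 'src' ] }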
+ var ready = function (options) { + normaliseRules(options, callback); + }; + + // if we didn't pick up a nodemon.json file & there's no cli ignores + // then try loading an old style .nodemonignore file + if (config.loaded.length === 0) { + var legacy = loadLegacyIgnore.bind(null, options, config, ready); + + // first try .nodemonignore, if that doesn't exist, try nodemon-ignore + return legacy('.nodemonignore', function () { + legacy('nodemon-ignore', function (options) { + ready(options); + }); + }); + } + + ready(options); + }); + }); +} + +/** + * Loads the old style nodemonignore files which is a list of patterns + * in a file to ignore + * + * @param {Object} options nodemon user options + * @param {Function} success + * @param {String} filename ignore file (.nodemonignore or nodemon-ignore) + * @param {Function} fail (optional) failure callback + */ +function loadLegacyIgnore(options, config, success, filename, fail) { + var ignoreFile = path.join(process.cwd(), filename); + + exists(ignoreFile, function (exists) { + if (exists) { + config.loaded.push(ignoreFile); + return parse(ignoreFile, function (error, rules) { + options.ignore = rules.raw; + success(options); + }); + } + + if (fail) { + fail(options); + } else { + success(options); + } + }); +} + +function normaliseRules(options, ready) { + // convert ignore and watch options to rules/regexp + rules.watch.add(options.watch); + rules.ignore.add(options.ignore); + + // normalise the watch and ignore arrays + options.watch = options.watch === false ? false : rules.rules.watch; + options.ignore = rules.rules.ignore; + + ready(options); +} + +/** + * Looks for a config in the current working directory, and a config in the + * user's home directory, merging the two together, giving priority to local + * config. 
This can then be overwritten later by command line arguments + * + * @param {Function} ready callback to pass loaded settings to + */ +function loadFile(options, config, dir, ready) { + if (!ready) { + ready = function () { }; + } + + var callback = function (settings) { + // prefer the local nodemon.json and fill in missing items using + // the global options + ready(utils.merge(settings, options)); + }; + + if (!dir) { + return callback({}); + } + + var filename = options.configFile || path.join(dir, 'nodemon.json'); + + if (config.loaded.indexOf(filename) !== -1) { + // don't bother re-parsing the same config file + return callback({}); + } + + fs.readFile(filename, 'utf8', function (err, data) { + if (err) { + if (err.code === 'ENOENT') { + if (!options.configFile && dir !== utils.home) { + // if no specified local config file and local nodemon.json + // doesn't exist, try the package.json + return loadPackageJSON(config, callback); + } + } + return callback({}); + } + + var settings = {}; + + try { + settings = JSON.parse(data.toString('utf8').replace(/^\uFEFF/, '')); + if (!filename.endsWith('package.json') || settings.nodemonConfig) { + config.loaded.push(filename); + } + } catch (e) { + utils.log.fail('Failed to parse config ' + filename); + console.error(e); + process.exit(1); + } + + // options values will overwrite settings + callback(settings); + }); +} + +function loadPackageJSON(config, ready) { + if (!ready) { + ready = () => { }; + } + + const dir = process.cwd(); + const filename = path.join(dir, 'package.json'); + const packageLoadOptions = { configFile: filename }; + return loadFile(packageLoadOptions, config, dir, settings => { + ready(settings.nodemonConfig || {}); + }); +} + +function mutateExecOptions(options) { + // work out the execOptions based on the final config we have + options.execOptions = exec({ + script: options.script, + exec: options.exec, + args: options.args, + scriptPosition: options.scriptPosition, + nodeArgs: options.nodeArgs, + execArgs: options.execArgs, + ext: options.ext, + env: options.env, + }, options.execMap); + + // clean up values that we don't need at the top level + delete options.scriptPosition; + delete options.script; + delete options.args; + delete options.ext; + + return options; +} diff --git a/node_modules/nodemon/lib/help/index.js b/node_modules/nodemon/lib/help/index.js new file mode 100644 index 00000000..1054b602 --- /dev/null +++ b/node_modules/nodemon/lib/help/index.js @@ -0,0 +1,27 @@ +var fs = require('fs'); +var path = require('path'); +const supportsColor = require('supports-color'); + +module.exports = help; + +const highlight = supportsColor.stdout ? 
'\x1B\[$1m' : ''; + +function help(item) { + if (!item) { + item = 'help'; + } else if (item === true) { // if used with -h or --help and no args + item = 'help'; + } + + // cleanse the filename to only contain letters + // aka: /\W/g but figured this was eaiser to read + item = item.replace(/[^a-z]/gi, ''); + + try { + var dir = path.join(__dirname, '..', '..', 'doc', 'cli', item + '.txt'); + var body = fs.readFileSync(dir, 'utf8'); + return body.replace(/\\x1B\[(.)m/g, highlight); + } catch (e) { + return '"' + item + '" help can\'t be found'; + } +} diff --git a/node_modules/nodemon/lib/index.js b/node_modules/nodemon/lib/index.js new file mode 100644 index 00000000..0eca5c45 --- /dev/null +++ b/node_modules/nodemon/lib/index.js @@ -0,0 +1 @@ +module.exports = require('./nodemon'); \ No newline at end of file diff --git a/node_modules/nodemon/lib/monitor/index.js b/node_modules/nodemon/lib/monitor/index.js new file mode 100644 index 00000000..89db029b --- /dev/null +++ b/node_modules/nodemon/lib/monitor/index.js @@ -0,0 +1,4 @@ +module.exports = { + run: require('./run'), + watch: require('./watch').watch, +}; diff --git a/node_modules/nodemon/lib/monitor/match.js b/node_modules/nodemon/lib/monitor/match.js new file mode 100644 index 00000000..3261ced1 --- /dev/null +++ b/node_modules/nodemon/lib/monitor/match.js @@ -0,0 +1,269 @@ +const minimatch = require('minimatch'); +const path = require('path'); +const fs = require('fs'); +const debug = require('debug')('nodemon:match'); +const utils = require('../utils'); + +module.exports = match; +module.exports.rulesToMonitor = rulesToMonitor; + +function rulesToMonitor(watch, ignore, config) { + var monitor = []; + + if (!Array.isArray(ignore)) { + if (ignore) { + ignore = [ignore]; + } else { + ignore = []; + } + } + + if (!Array.isArray(watch)) { + if (watch) { + watch = [watch]; + } else { + watch = []; + } + } + + if (watch && watch.length) { + monitor = utils.clone(watch); + } + + if (ignore) { + [].push.apply(monitor, (ignore || []).map(function (rule) { + return '!' + rule; + })); + } + + var cwd = process.cwd(); + + // next check if the monitored paths are actual directories + // or just patterns - and expand the rule to include *.* + monitor = monitor.map(function (rule) { + var not = rule.slice(0, 1) === '!'; + + if (not) { + rule = rule.slice(1); + } + + if (rule === '.' || rule === '.*') { + rule = '*.*'; + } + + var dir = path.resolve(cwd, rule); + + try { + var stat = fs.statSync(dir); + if (stat.isDirectory()) { + rule = dir; + if (rule.slice(-1) !== '/') { + rule += '/'; + } + rule += '**/*'; + + // `!not` ... sorry. + if (!not) { + config.dirs.push(dir); + } + } else { + // ensures we end up in the check that tries to get a base directory + // and then adds it to the watch list + throw new Error(); + } + } catch (e) { + var base = tryBaseDir(dir); + if (!not && base) { + if (config.dirs.indexOf(base) === -1) { + config.dirs.push(base); + } + } + } + + if (rule.slice(-1) === '/') { + // just slap on a * anyway + rule += '*'; + } + + // if the url ends with * but not **/* and not *.* + // then convert to **/* - somehow it was missed :-\ + if (rule.slice(-4) !== '**/*' && + rule.slice(-1) === '*' && + rule.indexOf('*.') === -1) { + + if (rule.slice(-2) !== '**') { + rule += '*/*'; + } + } + + + return (not ? '!' 
: '') + rule; + }); + + return monitor; +} + +function tryBaseDir(dir) { + var stat; + if (/[?*\{\[]+/.test(dir)) { // if this is pattern, then try to find the base + try { + var base = path.dirname(dir.replace(/([?*\{\[]+.*$)/, 'foo')); + stat = fs.statSync(base); + if (stat.isDirectory()) { + return base; + } + } catch (error) { + // console.log(error); + } + } else { + try { + stat = fs.statSync(dir); + // if this path is actually a single file that exists, then just monitor + // that, *specifically*. + if (stat.isFile() || stat.isDirectory()) { + return dir; + } + } catch (e) { } + } + + return false; +} + +function match(files, monitor, ext) { + // sort the rules by highest specificity (based on number of slashes) + // ignore rules (!) get sorted highest as they take precedent + const cwd = process.cwd(); + var rules = monitor.sort(function (a, b) { + var r = b.split(path.sep).length - a.split(path.sep).length; + var aIsIgnore = a.slice(0, 1) === '!'; + var bIsIgnore = b.slice(0, 1) === '!'; + + if (aIsIgnore || bIsIgnore) { + if (aIsIgnore) { + return -1; + } + + return 1; + } + + if (r === 0) { + return b.length - a.length; + } + return r; + }).map(function (s) { + var prefix = s.slice(0, 1); + + if (prefix === '!') { + if (s.indexOf('!' + cwd) === 0) { + return s; + } + return '!**' + (prefix !== path.sep ? path.sep : '') + s.slice(1); + } + + // if it starts with a period, then let's get the relative path + if (s.indexOf('.') === 0) { + return path.resolve(cwd, s); + } + + if (s.indexOf(cwd) === 0) { + return s; + } + + return '**' + (prefix !== path.sep ? path.sep : '') + s; + }); + + debug('rules', rules); + + var good = []; + var whitelist = []; // files that we won't check against the extension + var ignored = 0; + var watched = 0; + var usedRules = []; + var minimatchOpts = { + dot: true, + }; + + // enable case-insensitivity on Windows + if (utils.isWindows) { + minimatchOpts.nocase = true; + } + + files.forEach(function (file) { + file = path.resolve(cwd, file); + + var matched = false; + for (var i = 0; i < rules.length; i++) { + if (rules[i].slice(0, 1) === '!') { + if (!minimatch(file, rules[i], minimatchOpts)) { + ignored++; + matched = true; + break; + } + } else { + debug('match', file, minimatch(file, rules[i], minimatchOpts)); + if (minimatch(file, rules[i], minimatchOpts)) { + watched++; + + // don't repeat the output if a rule is matched + if (usedRules.indexOf(rules[i]) === -1) { + usedRules.push(rules[i]); + utils.log.detail('matched rule: ' + rules[i]); + } + + // if the rule doesn't match the WATCH EVERYTHING + // but *does* match a rule that ends with *.*, then + // white list it - in that we don't run it through + // the extension check too. + if (rules[i] !== '**' + path.sep + '*.*' && + rules[i].slice(-3) === '*.*') { + whitelist.push(file); + } else if (path.basename(file) === path.basename(rules[i])) { + // if the file matches the actual rule, then it's put on whitelist + whitelist.push(file); + } else { + good.push(file); + } + matched = true; + break; + } else { + // utils.log.detail('no match: ' + rules[i], file); + } + } + } + if (!matched) { + ignored++; + } + }); + + debug('good', good) + + // finally check the good files against the extensions that we're monitoring + if (ext) { + if (ext.indexOf(',') === -1) { + ext = '**/*.' 
+ ext; + } else { + ext = '**/*.{' + ext + '}'; + } + + good = good.filter(function (file) { + // only compare the filename to the extension test + return minimatch(path.basename(file), ext, minimatchOpts); + }); + } // else assume *.* + + var result = good.concat(whitelist); + + if (utils.isWindows) { + // fix for windows testing - I *think* this is okay to do + result = result.map(function (file) { + return file.slice(0, 1).toLowerCase() + file.slice(1); + }); + } + + return { + result: result, + ignored: ignored, + watched: watched, + total: files.length, + }; +} diff --git a/node_modules/nodemon/lib/monitor/run.js b/node_modules/nodemon/lib/monitor/run.js new file mode 100644 index 00000000..7084c6bf --- /dev/null +++ b/node_modules/nodemon/lib/monitor/run.js @@ -0,0 +1,438 @@ +var debug = require('debug')('nodemon'); +const statSync = require('fs').statSync; +var utils = require('../utils'); +var bus = utils.bus; +var childProcess = require('child_process'); +var spawn = childProcess.spawn; +var exec = childProcess.exec; +var fork = childProcess.fork; +var watch = require('./watch').watch; +var config = require('../config'); +var child = null; // the actual child process we spawn +var killedAfterChange = false; +var noop = function () { }; +var restart = null; +var psTree = require('pstree.remy'); +var path = require('path'); +var signals = require('./signals'); + +function run(options) { + var cmd = config.command.raw; + + var runCmd = !options.runOnChangeOnly || config.lastStarted !== 0; + if (runCmd) { + utils.log.status('starting `' + config.command.string + '`'); + } + + /*jshint validthis:true*/ + restart = run.bind(this, options); + run.restart = restart; + + config.lastStarted = Date.now(); + + var stdio = ['pipe', 'pipe', 'pipe']; + + if (config.options.stdout) { + stdio = ['pipe', process.stdout, process.stderr]; + } + + if (config.options.stdin === false) { + stdio = [process.stdin, process.stdout, process.stderr]; + } + + var sh = 'sh'; + var shFlag = '-c'; + + const binPath = process.cwd() + '/node_modules/.bin'; + + const spawnOptions = { + env: Object.assign({}, process.env, options.execOptions.env, { + PATH: binPath + ':' + process.env.PATH, + }), + stdio: stdio, + } + + var executable = cmd.executable; + + if (utils.isWindows) { + // if the exec includes a forward slash, reverse it for windows compat + // but *only* apply to the first command, and none of the arguments. + // ref #1251 and #1236 + if (executable.indexOf('/') !== -1) { + executable = executable.split(' ').map((e, i) => { + if (i === 0) { + return path.normalize(e); + } + return e; + }).join(' '); + } + // taken from npm's cli: https://git.io/vNFD4 + sh = process.env.comspec || 'cmd'; + shFlag = '/d /s /c'; + spawnOptions.windowsVerbatimArguments = true; + } + + var args = runCmd ? 
utils.stringify(executable, cmd.args) : ':'; + var spawnArgs = [sh, [shFlag, args], spawnOptions]; + + const firstArg = cmd.args[0] || ''; + + var inBinPath = false; + try { + inBinPath = statSync(`${binPath}/${executable}`).isFile(); + } catch (e) {} + + // hasStdio allows us to correctly handle stdin piping + // see: https://git.io/vNtX3 + const hasStdio = utils.satisfies('>= 6.4.0 || < 5'); + + // forking helps with sub-process handling and tends to clean up better + // than spawning, but it should only be used under specific conditions + const shouldFork = + !config.options.spawn && + !inBinPath && + !(firstArg.indexOf('-') === 0) && // don't fork if there's a node exec arg + firstArg !== 'inspect' && // don't fork it's `inspect` debugger + executable === 'node' && // only fork if node + utils.version.major > 4 // only fork if node version > 4 + + if (shouldFork) { + var forkArgs = cmd.args.slice(1); + var env = utils.merge(options.execOptions.env, process.env); + stdio.push('ipc'); + child = fork(options.execOptions.script, forkArgs, { + env: env, + stdio: stdio, + silent: !hasStdio, + }); + utils.log.detail('forking'); + debug('fork', sh, shFlag, args) + } else { + utils.log.detail('spawning'); + child = spawn.apply(null, spawnArgs); + debug('spawn', sh, shFlag, args) + } + + if (config.required) { + var emit = { + stdout: function (data) { + bus.emit('stdout', data); + }, + stderr: function (data) { + bus.emit('stderr', data); + }, + }; + + // now work out what to bind to... + if (config.options.stdout) { + child.on('stdout', emit.stdout).on('stderr', emit.stderr); + } else { + child.stdout.on('data', emit.stdout); + child.stderr.on('data', emit.stderr); + + bus.stdout = child.stdout; + bus.stderr = child.stderr; + } + + if (shouldFork) { + child.on('message', function (message, sendHandle) { + bus.emit('message', message, sendHandle); + }); + } + } + + bus.emit('start'); + + utils.log.detail('child pid: ' + child.pid); + + child.on('error', function (error) { + bus.emit('error', error); + if (error.code === 'ENOENT') { + utils.log.error('unable to run executable: "' + cmd.executable + '"'); + process.exit(1); + } else { + utils.log.error('failed to start child process: ' + error.code); + throw error; + } + }); + + child.on('exit', function (code, signal) { + if (child && child.stdin) { + process.stdin.unpipe(child.stdin); + } + + if (code === 127) { + utils.log.error('failed to start process, "' + cmd.executable + + '" exec not found'); + bus.emit('error', code); + process.exit(); + } + + // If the command failed with code 2, it may or may not be a syntax error + // See: http://git.io/fNOAR + // We will only assume a parse error, if the child failed quickly + if (code === 2 && Date.now() < config.lastStarted + 500) { + utils.log.error('process failed, unhandled exit code (2)'); + utils.log.error(''); + utils.log.error('Either the command has a syntax error,'); + utils.log.error('or it is exiting with reserved code 2.'); + utils.log.error(''); + utils.log.error('To keep nodemon running even after a code 2,'); + utils.log.error('add this to the end of your command: || exit 1'); + utils.log.error(''); + utils.log.error('Read more here: https://git.io/fNOAG'); + utils.log.error(''); + utils.log.error('nodemon will stop now so that you can fix the command.'); + utils.log.error(''); + bus.emit('error', code); + process.exit(); + } + + // In case we killed the app ourselves, set the signal thusly + if (killedAfterChange) { + killedAfterChange = false; + signal = config.signal; + } + // 
this is nasty, but it gives it windows support + if (utils.isWindows && signal === 'SIGTERM') { + signal = config.signal; + } + + if (signal === config.signal || code === 0) { + // this was a clean exit, so emit exit, rather than crash + debug('bus.emit(exit) via ' + config.signal); + bus.emit('exit', signal); + + // exit the monitor, but do it gracefully + if (signal === config.signal) { + return restart(); + } + + if (code === 0) { // clean exit - wait until file change to restart + if (runCmd) { + utils.log.status('clean exit - waiting for changes before restart'); + } + child = null; + } + } else { + bus.emit('crash'); + if (options.exitcrash) { + utils.log.fail('app crashed'); + if (!config.required) { + process.exit(1); + } + } else { + utils.log.fail('app crashed - waiting for file changes before' + + ' starting...'); + child = null; + } + } + + if (config.options.restartable) { + // stdin needs to kick in again to be able to listen to the + // restart command + process.stdin.resume(); + } + }); + + run.kill = function (noRestart, callback) { + // I hate code like this :( - Remy (author of said code) + if (typeof noRestart === 'function') { + callback = noRestart; + noRestart = false; + } + + if (!callback) { + callback = noop; + } + + if (child !== null) { + // if the stdin piping is on, we need to unpipe, but also close stdin on + // the child, otherwise linux can throw EPIPE or ECONNRESET errors. + if (options.stdin) { + process.stdin.unpipe(child.stdin); + } + + // For the on('exit', ...) handler above the following looks like a + // crash, so we set the killedAfterChange flag if a restart is planned + if (!noRestart) { + killedAfterChange = true; + } + + /* Now kill the entire subtree of processes belonging to nodemon */ + var oldPid = child.pid; + if (child) { + kill(child, config.signal, function () { + // this seems to fix the 0.11.x issue with the "rs" restart command, + // though I'm unsure why. it seems like more data is streamed in to + // stdin after we close. + if (child && options.stdin && child.stdin && oldPid === child.pid) { + child.stdin.end(); + } + callback(); + }); + } + } else if (!noRestart) { + // if there's no child, then we need to manually start the process + // this is because as there was no child, the child.on('exit') event + // handler doesn't exist which would normally trigger the restart. 
+ bus.once('start', callback); + restart(); + } else { + callback(); + } + }; + + // connect stdin to the child process (options.stdin is on by default) + if (options.stdin) { + process.stdin.resume(); + // FIXME decide whether or not we need to decide the encoding + // process.stdin.setEncoding('utf8'); + + // swallow the stdin error if it happens + // ref: https://github.com/remy/nodemon/issues/1195 + if (hasStdio) { + child.stdin.on('error', () => { }); + process.stdin.pipe(child.stdin); + } else { + if (child.stdout) { + child.stdout.pipe(process.stdout); + } else { + utils.log.error('running an unsupported version of node ' + + process.version); + utils.log.error('nodemon may not work as expected - ' + + 'please consider upgrading to LTS'); + } + } + + bus.once('exit', function () { + if (child && process.stdin.unpipe) { // node > 0.8 + process.stdin.unpipe(child.stdin); + } + }); + } + + debug('start watch on: %s', config.options.watch); + if (config.options.watch !== false) { + watch(); + } +} + +function kill(child, signal, callback) { + if (!callback) { + callback = function () { }; + } + + if (utils.isWindows) { + // When using CoffeeScript under Windows, child's process is not node.exe + // Instead coffee.cmd is launched, which launches cmd.exe, which starts + // node.exe as a child process child.kill() would only kill cmd.exe, not + // node.exe + // Therefore we use the Windows taskkill utility to kill the process and all + // its children (/T for tree). + // Force kill (/F) the whole child tree (/T) by PID (/PID 123) + exec('taskkill /pid ' + child.pid + ' /T /F'); + callback(); + } else { + // we use psTree to kill the full subtree of nodemon, because when + // spawning processes like `coffee` under the `--debug` flag, it'll spawn + // it's own child, and that can't be killed by nodemon, so psTree gives us + // an array of PIDs that have spawned under nodemon, and we send each the + // configured signal (default: SIGUSR2) signal, which fixes #335 + // note that psTree also works if `ps` is missing by looking in /proc + const sig = signal.replace('SIG', ''); + psTree(child.pid, function (err, kids) { + if (psTree.hasPS) { + spawn('kill', ['-s', sig, child.pid].concat(kids)) + .on('close', callback); + } else { + // make sure we kill from smallest to largest + const pids = kids.concat(child.pid).sort(); + pids.forEach(pid => { + exec('kill -' + signals[signal] + ' ' + pid, () => { }); + }); + callback(); + } + }); + + } +} + +// stubbed out for now, filled in during run +run.kill = function (flag, callback) { + if (callback) { + callback(); + } +}; +run.restart = noop; + +bus.on('quit', function onQuit(code) { + if (code === undefined) { + code = 0; + } + + // remove event listener + var exitTimer = null; + var exit = function () { + clearTimeout(exitTimer); + exit = noop; // null out in case of race condition + child = null; + if (!config.required) { + // Execute all other quit listeners. 
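// --- Editor's sketch (illustrative only, not part of nodemon): the non-Windows
// branch of kill() earlier in this file, reduced to a standalone helper. It uses
// pstree.remy exactly as the code above does; pid and signal values are examples.
const psTreeLib = require('pstree.remy');
const { spawn: spawnKill } = require('child_process');

function killTree(pid, signal, done) {
  psTreeLib(pid, function (err, kids) {
    // send the signal to the child and to every descendant pid it spawned
    const targets = [String(pid)].concat(kids.map(String));
    spawnKill('kill', ['-s', signal.replace('SIG', '')].concat(targets))
      .on('close', done);
  });
}
// killTree(child.pid, 'SIGUSR2', () => console.log('process tree stopped'));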
+ bus.listeners('quit').forEach(function (listener) { + if (listener !== onQuit) { + listener(); + } + }); + process.exit(code); + } else { + bus.emit('exit'); + } + }; + + // if we're not running already, don't bother with trying to kill + if (config.run === false) { + return exit(); + } + + // immediately try to stop any polling + config.run = false; + + if (child) { + // give up waiting for the kids after 10 seconds + exitTimer = setTimeout(exit, 10 * 1000); + child.removeAllListeners('exit'); + child.once('exit', exit); + + kill(child, 'SIGINT'); + } else { + exit(); + } +}); + +bus.on('restart', function () { + // run.kill will send a SIGINT to the child process, which will cause it + // to terminate, which in turn uses the 'exit' event handler to restart + run.kill(); +}); + +// remove the child file on exit +process.on('exit', function () { + utils.log.detail('exiting'); + if (child) { child.kill(); } +}); + +// because windows borks when listening for the SIG* events +if (!utils.isWindows) { + bus.once('boot', () => { + // usual suspect: ctrl+c exit + process.once('SIGINT', () => bus.emit('quit', 130)); + process.once('SIGTERM', () => { + bus.emit('quit', 143); + if (child) { child.kill('SIGTERM'); } + }); + }) +} + + +module.exports = run; diff --git a/node_modules/nodemon/lib/monitor/signals.js b/node_modules/nodemon/lib/monitor/signals.js new file mode 100644 index 00000000..daff6e05 --- /dev/null +++ b/node_modules/nodemon/lib/monitor/signals.js @@ -0,0 +1,34 @@ +module.exports = { + SIGHUP: 1, + SIGINT: 2, + SIGQUIT: 3, + SIGILL: 4, + SIGTRAP: 5, + SIGABRT: 6, + SIGBUS: 7, + SIGFPE: 8, + SIGKILL: 9, + SIGUSR1: 10, + SIGSEGV: 11, + SIGUSR2: 12, + SIGPIPE: 13, + SIGALRM: 14, + SIGTERM: 15, + SIGSTKFLT: 16, + SIGCHLD: 17, + SIGCONT: 18, + SIGSTOP: 19, + SIGTSTP: 20, + SIGTTIN: 21, + SIGTTOU: 22, + SIGURG: 23, + SIGXCPU: 24, + SIGXFSZ: 25, + SIGVTALRM: 26, + SIGPROF: 27, + SIGWINCH: 28, + SIGIO: 29, + SIGPWR: 30, + SIGSYS: 31, + SIGRTMIN: 35, +} diff --git a/node_modules/nodemon/lib/monitor/watch.js b/node_modules/nodemon/lib/monitor/watch.js new file mode 100644 index 00000000..fe26c283 --- /dev/null +++ b/node_modules/nodemon/lib/monitor/watch.js @@ -0,0 +1,234 @@ +module.exports.watch = watch; +module.exports.resetWatchers = resetWatchers; + +var debug = require('debug')('nodemon:watch'); +var debugRoot = require('debug')('nodemon'); +var chokidar = require('chokidar'); +var undefsafe = require('undefsafe'); +var config = require('../config'); +var path = require('path'); +var utils = require('../utils'); +var bus = utils.bus; +var match = require('./match'); +var watchers = []; +var debouncedBus; + +bus.on('reset', resetWatchers); + +function resetWatchers() { + debugRoot('resetting watchers'); + watchers.forEach(function (watcher) { + watcher.close(); + }); + watchers = []; +} + +function watch() { + if (watchers.length) { + debug('early exit on watch, still watching (%s)', watchers.length); + return; + } + + var dirs = [].slice.call(config.dirs); + + debugRoot('start watch on: %s', dirs.join(', ')); + const rootIgnored = config.options.ignore; + debugRoot('ignored', rootIgnored); + + var watchedFiles = []; + + const promise = new Promise(function (resolve) { + const dotFilePattern = /[/\\]\./; + var ignored = match.rulesToMonitor( + [], // not needed + Array.from(rootIgnored), + config + ).map(pattern => pattern.slice(1)); + + const addDotFile = dirs.filter(dir => dir.match(dotFilePattern)); + + // don't ignore dotfiles if explicitly watched. 
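// --- Editor's sketch (illustrative only, not part of nodemon): the chokidar
// set-up that watch() above builds, reduced to its essentials. The paths and
// ignore patterns here are hypothetical examples.
const chokidarLib = require('chokidar');

const exampleWatcher = chokidarLib.watch(['.'], {
  ignored: ['**/node_modules/**', /[/\\]\./],  // ignore rules + dot-file pattern
  ignorePermissionErrors: true,
  persistent: true,
  usePolling: false,  // set to true when legacy (polling) watch is requested
});

exampleWatcher.on('change', file => console.log('changed:', file));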
+ if (addDotFile.length === 0) { + ignored.push(dotFilePattern); + } + + var watchOptions = { + ignorePermissionErrors: true, + ignored: ignored, + persistent: true, + usePolling: config.options.legacyWatch || false, + interval: config.options.pollingInterval, + }; + + if (utils.isWindows) { + watchOptions.disableGlobbing = true; + } + + if (process.env.TEST) { + watchOptions.useFsEvents = false; + } + + var watcher = chokidar.watch( + dirs, + Object.assign({}, watchOptions, config.options.watchOptions || {}) + ); + + watcher.ready = false; + + var total = 0; + + watcher.on('change', filterAndRestart); + watcher.on('add', function (file) { + if (watcher.ready) { + return filterAndRestart(file); + } + + watchedFiles.push(file); + bus.emit('watching', file); + debug('watching dir: %s', file); + }); + watcher.on('ready', function () { + watchedFiles = Array.from(new Set(watchedFiles)); // ensure no dupes + total = watchedFiles.length; + watcher.ready = true; + resolve(total); + debugRoot('watch is complete'); + }); + + watcher.on('error', function (error) { + if (error.code === 'EINVAL') { + utils.log.error( + 'Internal watch failed. Likely cause: too many ' + + 'files being watched (perhaps from the root of a drive?\n' + + 'See https://github.com/paulmillr/chokidar/issues/229 for details' + ); + } else { + utils.log.error('Internal watch failed: ' + error.message); + process.exit(1); + } + }); + + watchers.push(watcher); + }); + + return promise.catch(e => { + // this is a core error and it should break nodemon - so I have to break + // out of a promise using the setTimeout + setTimeout(() => { + throw e; + }); + }).then(function () { + utils.log.detail(`watching ${watchedFiles.length} file${ + watchedFiles.length === 1 ? '' : 's'}`); + return watchedFiles; + }); +} + +function filterAndRestart(files) { + if (!Array.isArray(files)) { + files = [files]; + } + + if (files.length) { + var cwd = process.cwd(); + if (this.options && this.options.cwd) { + cwd = this.options.cwd; + } + + utils.log.detail( + 'files triggering change check: ' + + files + .map(file => { + const res = path.relative(cwd, file); + return res; + }) + .join(', ') + ); + + // make sure the path is right and drop an empty + // filenames (sometimes on windows) + files = files.filter(Boolean).map(file => { + return path.relative(process.cwd(), path.relative(cwd, file)); + }); + + if (utils.isWindows) { + // ensure the drive letter is in uppercase (c:\foo -> C:\foo) + files = files.map(f => { + if (f.indexOf(':') === -1) { return f; } + return f[0].toUpperCase() + f.slice(1); + }); + } + + + debug('filterAndRestart on', files); + + var matched = match( + files, + config.options.monitor, + undefsafe(config, 'options.execOptions.ext') + ); + + debug('matched?', JSON.stringify(matched)); + + // if there's no matches, then test to see if the changed file is the + // running script, if so, let's allow a restart + if (config.options.execOptions.script) { + const script = path.resolve(config.options.execOptions.script); + if (matched.result.length === 0 && script) { + const length = script.length; + files.find(file => { + if (file.substr(-length, length) === script) { + matched = { + result: [file], + total: 1, + }; + return true; + } + }); + } + } + + utils.log.detail( + 'changes after filters (before/after): ' + + [files.length, matched.result.length].join('/') + ); + + // reset the last check so we're only looking at recently modified files + config.lastStarted = Date.now(); + + if (matched.result.length) { + if 
(config.options.delay > 0) { + utils.log.detail('delaying restart for ' + config.options.delay + 'ms'); + if (debouncedBus === undefined) { + debouncedBus = debounce(restartBus, config.options.delay); + } + debouncedBus(matched); + } else { + return restartBus(matched); + } + } + } +} + +function restartBus(matched) { + utils.log.status('restarting due to changes...'); + matched.result.map(file => { + utils.log.detail(path.relative(process.cwd(), file)); + }); + + if (config.options.verbose) { + utils.log._log(''); + } + + bus.emit('restart', matched.result); +} + +function debounce(fn, delay) { + var timer = null; + return function () { + const context = this; + const args = arguments; + clearTimeout(timer); + timer = setTimeout(() =>fn.apply(context, args), delay); + }; +} diff --git a/node_modules/nodemon/lib/nodemon.js b/node_modules/nodemon/lib/nodemon.js new file mode 100644 index 00000000..1020f22d --- /dev/null +++ b/node_modules/nodemon/lib/nodemon.js @@ -0,0 +1,301 @@ +var debug = require('debug')('nodemon'); +var path = require('path'); +var monitor = require('./monitor'); +var cli = require('./cli'); +var version = require('./version'); +var util = require('util'); +var utils = require('./utils'); +var bus = utils.bus; +var help = require('./help'); +var config = require('./config'); +var spawn = require('./spawn'); +const defaults = require('./config/defaults') +var eventHandlers = {}; + +// this is fairly dirty, but theoretically sound since it's part of the +// stable module API +config.required = utils.isRequired; + +function nodemon(settings) { + bus.emit('boot'); + nodemon.reset(); + + // allow the cli string as the argument to nodemon, and allow for + // `node nodemon -V app.js` or just `-V app.js` + if (typeof settings === 'string') { + settings = settings.trim(); + if (settings.indexOf('node') !== 0) { + if (settings.indexOf('nodemon') !== 0) { + settings = 'nodemon ' + settings; + } + settings = 'node ' + settings; + } + settings = cli.parse(settings); + } + + // set the debug flag as early as possible to get all the detailed logging + if (settings.verbose) { + utils.debug = true; + } + + if (settings.help) { + process.stdout._handle.setBlocking(true); // nodejs/node#6456 + console.log(help(settings.help)); + if (!config.required) { + process.exit(0); + } + } + + if (settings.version) { + version().then(function (v) { + console.log(v); + if (!config.required) { + process.exit(0); + } + }); + return; + } + + // nodemon tools like grunt-nodemon. This affects where + // the script is being run from, and will affect where + // nodemon looks for the nodemon.json files + if (settings.cwd) { + // this is protection to make sure we haven't dont the chdir already... + // say like in cli/parse.js (which is where we do this once already!) 
+ if (process.cwd() !== path.resolve(config.system.cwd, settings.cwd)) { + process.chdir(settings.cwd); + } + } + + const cwd = process.cwd(); + + config.load(settings, function (config) { + if (!config.options.dump && !config.options.execOptions.script && + config.options.execOptions.exec === 'node') { + if (!config.required) { + console.log(help('usage')); + process.exit(); + } + return; + } + + // before we print anything, update the colour setting on logging + utils.colours = config.options.colours; + + // always echo out the current version + utils.log.info(version.pinned); + + const cwd = process.cwd(); + + if (config.options.cwd) { + utils.log.detail('process root: ' + cwd); + } + + config.loaded.map(file => file.replace(cwd, '.')).forEach(file => { + utils.log.detail('reading config ' + file); + }); + + if (config.options.stdin && config.options.restartable) { + // allow nodemon to restart when the use types 'rs\n' + process.stdin.resume(); + process.stdin.setEncoding('utf8'); + process.stdin.on('data', data => { + const str = data.toString().trim().toLowerCase(); + + // if the keys entered match the restartable value, then restart! + if (str === config.options.restartable) { + bus.emit('restart'); + } else if (data.charCodeAt(0) === 12) { // ctrl+l + console.clear(); + } + }); + } else if (config.options.stdin) { + // so let's make sure we don't eat the key presses + // but also, since we're wrapping, watch out for + // special keys, like ctrl+c x 2 or '.exit' or ctrl+d or ctrl+l + var ctrlC = false; + var buffer = ''; + + process.stdin.on('data', function (data) { + data = data.toString(); + buffer += data; + const chr = data.charCodeAt(0); + + // if restartable, echo back + if (chr === 3) { + if (ctrlC) { + process.exit(0); + } + + ctrlC = true; + return; + } else if (buffer === '.exit' || chr === 4) { // ctrl+d + process.exit(); + } else if (chr === 13 || chr === 10) { // enter / carriage return + buffer = ''; + } else if (chr === 12) { // ctrl+l + console.clear(); + buffer = ''; + } + ctrlC = false; + }); + if (process.stdin.setRawMode) { + process.stdin.setRawMode(true); + } + } + + if (config.options.restartable) { + utils.log.info('to restart at any time, enter `' + + config.options.restartable + '`'); + } + + if (!config.required) { + const restartSignal = config.options.signal === 'SIGUSR2' ? 'SIGHUP' : 'SIGUSR2'; + process.on(restartSignal, nodemon.restart); + utils.bus.on('error', () => { + utils.log.fail((new Error().stack)); + }); + utils.log.detail((config.options.restartable ? 'or ' : '') + 'send ' + + restartSignal + ' to ' + process.pid + ' to restart'); + } + + const ignoring = config.options.monitor.map(function (rule) { + if (rule.slice(0, 1) !== '!') { + return false; + } + + rule = rule.slice(1); + + // don't notify of default ignores + if (defaults.ignoreRoot.indexOf(rule) !== -1) { + return false; + return rule.slice(3).slice(0, -3); + } + + if (rule.startsWith(cwd)) { + return rule.replace(cwd, '.'); + } + + return rule; + }).filter(Boolean).join(' '); + if (ignoring) utils.log.detail('ignoring: ' + ignoring); + + utils.log.info('watching: ' + config.options.monitor.map(function (rule) { + return rule.slice(0, 1) !== '!' ? 
rule : false; + }).filter(Boolean).join(' ')); + + utils.log.detail('watching extensions: ' + (config.options.execOptions.ext || '(all)')); + + if (config.options.dump) { + utils.log._log('log', '--------------'); + utils.log._log('log', 'node: ' + process.version); + utils.log._log('log', 'nodemon: ' + version.pinned); + utils.log._log('log', 'command: ' + process.argv.join(' ')); + utils.log._log('log', 'cwd: ' + cwd); + utils.log._log('log', ['OS:', process.platform, process.arch].join(' ')); + utils.log._log('log', '--------------'); + utils.log._log('log', util.inspect(config, { depth: null })); + utils.log._log('log', '--------------'); + if (!config.required) { + process.exit(); + } + + return; + } + + config.run = true; + + if (config.options.stdout === false) { + nodemon.on('start', function () { + nodemon.stdout = bus.stdout; + nodemon.stderr = bus.stderr; + + bus.emit('readable'); + }); + } + + if (config.options.events && Object.keys(config.options.events).length) { + Object.keys(config.options.events).forEach(function (key) { + utils.log.detail('bind ' + key + ' -> `' + + config.options.events[key] + '`'); + nodemon.on(key, function () { + if (config.options && config.options.events) { + spawn(config.options.events[key], config, + [].slice.apply(arguments)); + } + }); + }); + } + + monitor.run(config.options); + + }); + + return nodemon; +} + +nodemon.restart = function () { + utils.log.status('restarting child process'); + bus.emit('restart'); + return nodemon; +}; + +nodemon.addListener = nodemon.on = function (event, handler) { + if (!eventHandlers[event]) { eventHandlers[event] = []; } + eventHandlers[event].push(handler); + bus.on(event, handler); + return nodemon; +}; + +nodemon.once = function (event, handler) { + if (!eventHandlers[event]) { eventHandlers[event] = []; } + eventHandlers[event].push(handler); + bus.once(event, function () { + debug('bus.once(%s)', event); + eventHandlers[event].splice(eventHandlers[event].indexOf(handler), 1); + handler.apply(this, arguments); + }); + return nodemon; +}; + +nodemon.emit = function () { + bus.emit.apply(bus, [].slice.call(arguments)); + return nodemon; +}; + +nodemon.removeAllListeners = function (event) { + // unbind only the `nodemon.on` event handlers + Object.keys(eventHandlers).filter(function (e) { + return event ? 
e === event : true; + }).forEach(function (event) { + eventHandlers[event].forEach(function (handler) { + bus.removeListener(event, handler); + eventHandlers[event].splice(eventHandlers[event].indexOf(handler), 1); + }); + }); + + return nodemon; +}; + +nodemon.reset = function (done) { + bus.emit('reset', done); +}; + +bus.on('reset', function (done) { + debug('reset'); + nodemon.removeAllListeners(); + monitor.run.kill(true, function () { + utils.reset(); + config.reset(); + config.run = false; + if (done) { + done(); + } + }); +}); + +// expose the full config +nodemon.config = config; + +module.exports = nodemon; + diff --git a/node_modules/nodemon/lib/rules/add.js b/node_modules/nodemon/lib/rules/add.js new file mode 100644 index 00000000..de85bb7f --- /dev/null +++ b/node_modules/nodemon/lib/rules/add.js @@ -0,0 +1,89 @@ +'use strict'; + +var utils = require('../utils'); + +// internal +var reEscComments = /\\#/g; +// note that '^^' is used in place of escaped comments +var reUnescapeComments = /\^\^/g; +var reComments = /#.*$/; +var reEscapeChars = /[.|\-[\]()\\]/g; +var reAsterisk = /\*/g; + +module.exports = add; + +/** + * Converts file patterns or regular expressions to nodemon + * compatible RegExp matching rules. Note: the `rules` argument + * object is modified to include the new rule and new RegExp + * + * ### Example: + * + * var rules = { watch: [], ignore: [] }; + * add(rules, 'watch', '*.js'); + * add(rules, 'ignore', '/public/'); + * add(rules, 'watch', ':(\d)*\.js'); // note: string based regexp + * add(rules, 'watch', /\d*\.js/); + * + * @param {Object} rules containing `watch` and `ignore`. Also updated during + * execution + * @param {String} which must be either "watch" or "ignore" + * @param {String|RegExp} the actual rule. + */ +function add(rules, which, rule) { + if (!{ ignore: 1, watch: 1}[which]) { + throw new Error('rules/index.js#add requires "ignore" or "watch" as the ' + + 'first argument'); + } + + if (Array.isArray(rule)) { + rule.forEach(function (rule) { + add(rules, which, rule); + }); + return; + } + + // support the rule being a RegExp, but reformat it to + // the custom : format that we're working with. 
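// --- Editor's sketch (illustrative only): using nodemon programmatically via the
// module API defined in lib/nodemon.js above. The script name and handlers are
// hypothetical; the events ('start', 'restart', 'crash') are the ones emitted by
// lib/monitor/run.js and lib/monitor/watch.js above.
const nodemonApi = require('nodemon');

nodemonApi({ script: 'server.js', ext: 'js,json' });

nodemonApi
  .on('start', () => console.log('child started'))
  .on('restart', files => console.log('restarted because of', files))
  .on('crash', () => console.log('child crashed, waiting for changes'));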
+ if (rule instanceof RegExp) { + // rule = ':' + rule.toString().replace(/^\/(.*?)\/$/g, '$1'); + utils.log.error('RegExp format no longer supported, but globs are.'); + return; + } + + // remove comments and trim lines + // this mess of replace methods is escaping "\#" to allow for emacs temp files + + // first up strip comments and remove blank head or tails + rule = (rule || '').replace(reEscComments, '^^') + .replace(reComments, '') + .replace(reUnescapeComments, '#').trim(); + + var regexp = false; + + if (typeof rule === 'string' && rule.substring(0, 1) === ':') { + rule = rule.substring(1); + utils.log.error('RegExp no longer supported: ' + rule); + regexp = true; + } else if (rule.length === 0) { + // blank line (or it was a comment) + return; + } + + if (regexp) { + // rules[which].push(rule); + } else { + // rule = rule.replace(reEscapeChars, '\\$&') + // .replace(reAsterisk, '.*'); + + rules[which].push(rule); + // compile a regexp of all the rules for this ignore or watch + var re = rules[which].map(function (rule) { + return rule.replace(reEscapeChars, '\\$&') + .replace(reAsterisk, '.*'); + }).join('|'); + + // used for the directory matching + rules[which].re = new RegExp(re); + } +} diff --git a/node_modules/nodemon/lib/rules/index.js b/node_modules/nodemon/lib/rules/index.js new file mode 100644 index 00000000..04aa92f8 --- /dev/null +++ b/node_modules/nodemon/lib/rules/index.js @@ -0,0 +1,53 @@ +'use strict'; +var utils = require('../utils'); +var add = require('./add'); +var parse = require('./parse'); + +// exported +var rules = { ignore: [], watch: [] }; + +/** + * Loads a nodemon config file and populates the ignore + * and watch rules with it's contents, and calls callback + * with the new rules + * + * @param {String} filename + * @param {Function} callback + */ +function load(filename, callback) { + parse(filename, function (err, result) { + if (err) { + // we should have bombed already, but + utils.log.error(err); + callback(err); + } + + if (result.raw) { + result.raw.forEach(add.bind(null, rules, 'ignore')); + } else { + result.ignore.forEach(add.bind(null, rules, 'ignore')); + result.watch.forEach(add.bind(null, rules, 'watch')); + } + + callback(null, rules); + }); +} + +module.exports = { + reset: function () { // just used for testing + rules.ignore.length = rules.watch.length = 0; + delete rules.ignore.re; + delete rules.watch.re; + }, + load: load, + ignore: { + test: add.bind(null, rules, 'ignore'), + add: add.bind(null, rules, 'ignore'), + }, + watch: { + test: add.bind(null, rules, 'watch'), + add: add.bind(null, rules, 'watch'), + }, + add: add.bind(null, rules), + rules: rules, +}; \ No newline at end of file diff --git a/node_modules/nodemon/lib/rules/parse.js b/node_modules/nodemon/lib/rules/parse.js new file mode 100644 index 00000000..6e1cacea --- /dev/null +++ b/node_modules/nodemon/lib/rules/parse.js @@ -0,0 +1,43 @@ +'use strict'; +var fs = require('fs'); + +/** + * Parse the nodemon config file, supporting both old style + * plain text config file, and JSON version of the config + * + * @param {String} filename + * @param {Function} callback + */ +function parse(filename, callback) { + var rules = { + ignore: [], + watch: [], + }; + + fs.readFile(filename, 'utf8', function (err, content) { + + if (err) { + return callback(err); + } + + var json = null; + try { + json = JSON.parse(content); + } catch (e) {} + + if (json !== null) { + rules = { + ignore: json.ignore || [], + watch: json.watch || [], + }; + + return callback(null, rules); + } + 
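// --- Editor's sketch (illustrative only, not part of nodemon): what the rule
// compilation in lib/rules/add.js above produces. The patterns are examples;
// escaped literals are joined into one alternation used for directory matching.
const addRule = require('nodemon/lib/rules/add');

const exampleRules = { watch: [], ignore: [] };
addRule(exampleRules, 'ignore', '*.log');
addRule(exampleRules, 'ignore', 'tmp/*');

console.log(exampleRules.ignore.re); // roughly /.*\.log|tmp\/.*/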
+ // otherwise return the raw file + return callback(null, { raw: content.split(/\n/) }); + }); +} + +module.exports = parse; + diff --git a/node_modules/nodemon/lib/spawn.js b/node_modules/nodemon/lib/spawn.js new file mode 100644 index 00000000..d0adaeb1 --- /dev/null +++ b/node_modules/nodemon/lib/spawn.js @@ -0,0 +1,55 @@ +const utils = require('./utils'); +const merge = utils.merge; +const bus = utils.bus; +const spawn = require('child_process').spawn; + +module.exports = function spawnCommand(command, config, eventArgs) { + var stdio = ['pipe', 'pipe', 'pipe']; + + if (config.options.stdout) { + stdio = ['pipe', process.stdout, process.stderr]; + } + + var sh = 'sh'; + var shFlag = '-c'; + + if (utils.isWindows) { + sh = 'cmd'; + shFlag = '/c'; + } + + + if (!Array.isArray(command)) { + command = [command]; + } + + const args = command.join(' '); + + const env = merge(process.env, { FILENAME: eventArgs[0] }); + const child = spawn(sh, [shFlag, args], { + env: merge(config.options.execOptions.env, env), + stdio: stdio, + }); + + if (config.required) { + var emit = { + stdout: function (data) { + bus.emit('stdout', data); + }, + stderr: function (data) { + bus.emit('stderr', data); + }, + }; + + // now work out what to bind to... + if (config.options.stdout) { + child.on('stdout', emit.stdout).on('stderr', emit.stderr); + } else { + child.stdout.on('data', emit.stdout); + child.stderr.on('data', emit.stderr); + + bus.stdout = child.stdout; + bus.stderr = child.stderr; + } + } +}; diff --git a/node_modules/nodemon/lib/utils/bus.js b/node_modules/nodemon/lib/utils/bus.js new file mode 100644 index 00000000..4e120c58 --- /dev/null +++ b/node_modules/nodemon/lib/utils/bus.js @@ -0,0 +1,44 @@ +var events = require('events'); +var debug = require('debug')('nodemon'); +var util = require('util'); + +var Bus = function () { + events.EventEmitter.call(this); +}; + +util.inherits(Bus, events.EventEmitter); + +var bus = new Bus(); + +// /* +var collected = {}; +bus.on('newListener', function (event) { + debug('bus new listener: %s (%s)', event, bus.listeners(event).length); + if (!collected[event]) { + collected[event] = true; + bus.on(event, function () { + debug('bus emit: %s', event); + }); + } +}); + +// */ + +// proxy process messages (if forked) to the bus +process.on('message', function (event) { + debug('process.message(%s)', event); + bus.emit(event); +}); + +var emit = bus.emit; + +// if nodemon was spawned via a fork, allow upstream communication +// via process.send +if (process.send) { + bus.emit = function (event, data) { + process.send({ type: event, data: data }); + emit.apply(bus, arguments); + }; +} + +module.exports = bus; diff --git a/node_modules/nodemon/lib/utils/clone.js b/node_modules/nodemon/lib/utils/clone.js new file mode 100644 index 00000000..6ba6330f --- /dev/null +++ b/node_modules/nodemon/lib/utils/clone.js @@ -0,0 +1,40 @@ +module.exports = clone; + +// via http://stackoverflow.com/a/728694/22617 +function clone(obj) { + // Handle the 3 simple types, and null or undefined + if (null === obj || 'object' !== typeof obj) { + return obj; + } + + var copy; + + // Handle Date + if (obj instanceof Date) { + copy = new Date(); + copy.setTime(obj.getTime()); + return copy; + } + + // Handle Array + if (obj instanceof Array) { + copy = []; + for (var i = 0, len = obj.length; i < len; i++) { + copy[i] = clone(obj[i]); + } + return copy; + } + + // Handle Object + if (obj instanceof Object) { + copy = {}; + for (var attr in obj) { + if (obj.hasOwnProperty && 
obj.hasOwnProperty(attr)) { + copy[attr] = clone(obj[attr]); + } + } + return copy; + } + + throw new Error('Unable to copy obj! Its type isn\'t supported.'); +} \ No newline at end of file diff --git a/node_modules/nodemon/lib/utils/colour.js b/node_modules/nodemon/lib/utils/colour.js new file mode 100644 index 00000000..8c1b5905 --- /dev/null +++ b/node_modules/nodemon/lib/utils/colour.js @@ -0,0 +1,26 @@ +/** + * Encodes a string in a colour: red, yellow or green + * @param {String} c colour to highlight in + * @param {String} str the string to encode + * @return {String} coloured string for terminal printing + */ +function colour(c, str) { + return (colour[c] || colour.black) + str + colour.black; +} + +function strip(str) { + re.lastIndex = 0; // reset position + return str.replace(re, ''); +} + +colour.red = '\x1B[31m'; +colour.yellow = '\x1B[33m'; +colour.green = '\x1B[32m'; +colour.black = '\x1B[39m'; + +var reStr = Object.keys(colour).map(key => colour[key]).join('|'); +var re = new RegExp(('(' + reStr + ')').replace(/\[/g, '\\['), 'g'); + +colour.strip = strip; + +module.exports = colour; diff --git a/node_modules/nodemon/lib/utils/index.js b/node_modules/nodemon/lib/utils/index.js new file mode 100644 index 00000000..c4803383 --- /dev/null +++ b/node_modules/nodemon/lib/utils/index.js @@ -0,0 +1,102 @@ +var noop = function () { }; +var path = require('path'); +const semver = require('semver'); +var version = process.versions.node.split('.') || [null, null, null]; + +var utils = (module.exports = { + semver: semver, + satisfies: test => semver.satisfies(process.versions.node, test), + version: { + major: parseInt(version[0] || 0, 10), + minor: parseInt(version[1] || 0, 10), + patch: parseInt(version[2] || 0, 10), + }, + clone: require('./clone'), + merge: require('./merge'), + bus: require('./bus'), + isWindows: process.platform === 'win32', + isMac: process.platform === 'darwin', + isLinux: process.platform === 'linux', + isRequired: (function () { + var p = module.parent; + while (p) { + // in electron.js engine it happens + if (!p.filename) { + return true; + } + if (p.filename.indexOf('bin' + path.sep + 'nodemon.js') !== -1) { + return false; + } + p = p.parent; + } + + return true; + })(), + home: process.env.HOME || process.env.HOMEPATH, + quiet: function () { + // nukes the logging + if (!this.debug) { + for (var method in utils.log) { + if (typeof utils.log[method] === 'function') { + utils.log[method] = noop; + } + } + } + }, + reset: function () { + if (!this.debug) { + for (var method in utils.log) { + if (typeof utils.log[method] === 'function') { + delete utils.log[method]; + } + } + } + this.debug = false; + }, + regexpToText: function (t) { + return t + .replace(/\.\*\\./g, '*.') + .replace(/\\{2}/g, '^^') + .replace(/\\/g, '') + .replace(/\^\^/g, '\\'); + }, + stringify: function (exec, args) { + // serializes an executable string and array of arguments into a string + args = args || []; + + return [exec] + .concat( + args.map(function (arg) { + // if an argument contains a space, we want to show it with quotes + // around it to indicate that it is a single argument + if (arg.length > 0 && arg.indexOf(' ') === -1) { + return arg; + } + // this should correctly escape nested quotes + return JSON.stringify(arg); + }) + ) + .join(' ') + .trim(); + }, +}); + +utils.log = require('./log')(utils.isRequired); + +Object.defineProperty(utils, 'debug', { + set: function (value) { + this.log.debug = value; + }, + get: function () { + return this.log.debug; + }, +}); + 
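// --- Editor's sketch (illustrative only, not part of nodemon): utils.stringify()
// above quotes any argument containing a space so the final shell command stays
// intact. The executable and arguments shown are hypothetical.
const nodemonUtils = require('nodemon/lib/utils');

console.log(nodemonUtils.stringify('node', ['server.js', '--name', 'my app']));
// -> node server.js --name "my app"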
+Object.defineProperty(utils, 'colours', { + set: function (value) { + this.log.useColours = value; + }, + get: function () { + return this.log.useColours; + }, +}); diff --git a/node_modules/nodemon/lib/utils/log.js b/node_modules/nodemon/lib/utils/log.js new file mode 100644 index 00000000..65800872 --- /dev/null +++ b/node_modules/nodemon/lib/utils/log.js @@ -0,0 +1,82 @@ +var colour = require('./colour'); +var bus = require('./bus'); +var required = false; +var useColours = true; + +var coding = { + log: 'black', + info: 'yellow', + status: 'green', + detail: 'yellow', + fail: 'red', + error: 'red', +}; + +function log(type, text) { + var msg = '[nodemon] ' + (text || ''); + + if (useColours) { + msg = colour(coding[type], msg); + } + + // always push the message through our bus, using nextTick + // to help testing and get _out of_ promises. + process.nextTick(() => { + bus.emit('log', { type: type, message: text, colour: msg }); + }); + + // but if we're running on the command line, also echo out + // question: should we actually just consume our own events? + if (!required) { + if (type === 'error') { + console.error(msg); + } else { + console.log(msg || ''); + } + } +} + +var Logger = function (r) { + if (!(this instanceof Logger)) { + return new Logger(r); + } + this.required(r); + return this; +}; + +Object.keys(coding).forEach(function (type) { + Logger.prototype[type] = log.bind(null, type); +}); + +// detail is for messages that are turned on during debug +Logger.prototype.detail = function (msg) { + if (this.debug) { + log('detail', msg); + } +}; + +Logger.prototype.required = function (val) { + required = val; +}; + +Logger.prototype.debug = false; +Logger.prototype._log = function (type, msg) { + if (required) { + bus.emit('log', { type: type, message: msg || '', colour: msg || '' }); + } else if (type === 'error') { + console.error(msg); + } else { + console.log(msg || ''); + } +}; + +Object.defineProperty(Logger.prototype, 'useColours', { + set: function (val) { + useColours = val; + }, + get: function () { + return useColours; + }, +}); + +module.exports = Logger; diff --git a/node_modules/nodemon/lib/utils/merge.js b/node_modules/nodemon/lib/utils/merge.js new file mode 100644 index 00000000..1f3440bd --- /dev/null +++ b/node_modules/nodemon/lib/utils/merge.js @@ -0,0 +1,47 @@ +var clone = require('./clone'); + +module.exports = merge; + +function typesMatch(a, b) { + return (typeof a === typeof b) && (Array.isArray(a) === Array.isArray(b)); +} + +/** + * A deep merge of the source based on the target. 
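+ * Values that are undefined in the source are taken from the target; empty
+ * strings and empty arrays in the source also fall back to the target's
+ * value, and nested objects are merged recursively.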
+ * @param {Object} source [description] + * @param {Object} target [description] + * @return {Object} [description] + */ +function merge(source, target, result) { + if (result === undefined) { + result = clone(source); + } + + // merge missing values from the target to the source + Object.getOwnPropertyNames(target).forEach(function (key) { + if (source[key] === undefined) { + result[key] = target[key]; + } + }); + + Object.getOwnPropertyNames(source).forEach(function (key) { + var value = source[key]; + + if (target[key] && typesMatch(value, target[key])) { + // merge empty values + if (value === '') { + result[key] = target[key]; + } + + if (Array.isArray(value)) { + if (value.length === 0 && target[key].length) { + result[key] = target[key].slice(0); + } + } else if (typeof value === 'object') { + result[key] = merge(value, target[key]); + } + } + }); + + return result; +} \ No newline at end of file diff --git a/node_modules/nodemon/lib/version.js b/node_modules/nodemon/lib/version.js new file mode 100644 index 00000000..d0f51044 --- /dev/null +++ b/node_modules/nodemon/lib/version.js @@ -0,0 +1,100 @@ +module.exports = version; +module.exports.pin = pin; + +var fs = require('fs'); +var path = require('path'); +var exec = require('child_process').exec; +var root = null; + +function pin() { + return version().then(function (v) { + version.pinned = v; + }); +} + +function version(callback) { + // first find the package.json as this will be our root + var promise = findPackage(path.dirname(module.parent.filename)) + .then(function (dir) { + // now try to load the package + var v = require(path.resolve(dir, 'package.json')).version; + + if (v && v !== '0.0.0-development') { + return v; + } + + root = dir; + + // else we're in development, give the commit out + // get the last commit and whether the working dir is dirty + var promises = [ + branch().catch(function () { return 'master'; }), + commit().catch(function () { return ''; }), + dirty().catch(function () { return 0; }), + ]; + + // use the cached result as the export + return Promise.all(promises).then(function (res) { + var branch = res[0]; + var commit = res[1]; + var dirtyCount = parseInt(res[2], 10); + var curr = branch + ': ' + commit; + if (dirtyCount !== 0) { + curr += ' (' + dirtyCount + ' dirty files)'; + } + + return curr; + }); + }).catch(function (error) { + console.log(error.stack); + throw error; + }); + + if (callback) { + promise.then(function (res) { + callback(null, res); + }, callback); + } + + return promise; +} + +function findPackage(dir) { + if (dir === '/') { + return Promise.reject(new Error('package not found')); + } + return new Promise(function (resolve) { + fs.stat(path.resolve(dir, 'package.json'), function (error, exists) { + if (error || !exists) { + return resolve(findPackage(path.resolve(dir, '..'))); + } + + resolve(dir); + }); + }); +} + +function command(cmd) { + return new Promise(function (resolve, reject) { + exec(cmd, { cwd: root }, function (err, stdout, stderr) { + var error = stderr.trim(); + if (error) { + return reject(new Error(error)); + } + resolve(stdout.split('\n').join('')); + }); + }); +} + +function commit() { + return command('git rev-parse HEAD'); +} + +function branch() { + return command('git rev-parse --abbrev-ref HEAD'); +} + +function dirty() { + return command('expr $(git status --porcelain 2>/dev/null| ' + + 'egrep "^(M| M)" | wc -l)'); +} diff --git a/node_modules/nodemon/node_modules/.bin/nodetouch b/node_modules/nodemon/node_modules/.bin/nodetouch new file mode 
120000 index 00000000..ec214c8d --- /dev/null +++ b/node_modules/nodemon/node_modules/.bin/nodetouch @@ -0,0 +1 @@ +../../../touch/bin/nodetouch.js \ No newline at end of file diff --git a/node_modules/nodemon/node_modules/.bin/semver b/node_modules/nodemon/node_modules/.bin/semver new file mode 120000 index 00000000..b3ca6032 --- /dev/null +++ b/node_modules/nodemon/node_modules/.bin/semver @@ -0,0 +1 @@ +../../../semver/bin/semver \ No newline at end of file diff --git a/node_modules/nodemon/node_modules/debug/CHANGELOG.md b/node_modules/nodemon/node_modules/debug/CHANGELOG.md new file mode 100644 index 00000000..820d21e3 --- /dev/null +++ b/node_modules/nodemon/node_modules/debug/CHANGELOG.md @@ -0,0 +1,395 @@ + +3.1.0 / 2017-09-26 +================== + + * Add `DEBUG_HIDE_DATE` env var (#486) + * Remove ReDoS regexp in %o formatter (#504) + * Remove "component" from package.json + * Remove `component.json` + * Ignore package-lock.json + * Examples: fix colors printout + * Fix: browser detection + * Fix: spelling mistake (#496, @EdwardBetts) + +3.0.1 / 2017-08-24 +================== + + * Fix: Disable colors in Edge and Internet Explorer (#489) + +3.0.0 / 2017-08-08 +================== + + * Breaking: Remove DEBUG_FD (#406) + * Breaking: Use `Date#toISOString()` instead to `Date#toUTCString()` when output is not a TTY (#418) + * Breaking: Make millisecond timer namespace specific and allow 'always enabled' output (#408) + * Addition: document `enabled` flag (#465) + * Addition: add 256 colors mode (#481) + * Addition: `enabled()` updates existing debug instances, add `destroy()` function (#440) + * Update: component: update "ms" to v2.0.0 + * Update: separate the Node and Browser tests in Travis-CI + * Update: refactor Readme, fixed documentation, added "Namespace Colors" section, redid screenshots + * Update: separate Node.js and web browser examples for organization + * Update: update "browserify" to v14.4.0 + * Fix: fix Readme typo (#473) + +2.6.9 / 2017-09-22 +================== + + * remove ReDoS regexp in %o formatter (#504) + +2.6.8 / 2017-05-18 +================== + + * Fix: Check for undefined on browser globals (#462, @marbemac) + +2.6.7 / 2017-05-16 +================== + + * Fix: Update ms to 2.0.0 to fix regular expression denial of service vulnerability (#458, @hubdotcom) + * Fix: Inline extend function in node implementation (#452, @dougwilson) + * Docs: Fix typo (#455, @msasad) + +2.6.5 / 2017-04-27 +================== + + * Fix: null reference check on window.documentElement.style.WebkitAppearance (#447, @thebigredgeek) + * Misc: clean up browser reference checks (#447, @thebigredgeek) + * Misc: add npm-debug.log to .gitignore (@thebigredgeek) + + +2.6.4 / 2017-04-20 +================== + + * Fix: bug that would occur if process.env.DEBUG is a non-string value. (#444, @LucianBuzzo) + * Chore: ignore bower.json in npm installations. 
(#437, @joaovieira) + * Misc: update "ms" to v0.7.3 (@tootallnate) + +2.6.3 / 2017-03-13 +================== + + * Fix: Electron reference to `process.env.DEBUG` (#431, @paulcbetts) + * Docs: Changelog fix (@thebigredgeek) + +2.6.2 / 2017-03-10 +================== + + * Fix: DEBUG_MAX_ARRAY_LENGTH (#420, @slavaGanzin) + * Docs: Add backers and sponsors from Open Collective (#422, @piamancini) + * Docs: Add Slackin invite badge (@tootallnate) + +2.6.1 / 2017-02-10 +================== + + * Fix: Module's `export default` syntax fix for IE8 `Expected identifier` error + * Fix: Whitelist DEBUG_FD for values 1 and 2 only (#415, @pi0) + * Fix: IE8 "Expected identifier" error (#414, @vgoma) + * Fix: Namespaces would not disable once enabled (#409, @musikov) + +2.6.0 / 2016-12-28 +================== + + * Fix: added better null pointer checks for browser useColors (@thebigredgeek) + * Improvement: removed explicit `window.debug` export (#404, @tootallnate) + * Improvement: deprecated `DEBUG_FD` environment variable (#405, @tootallnate) + +2.5.2 / 2016-12-25 +================== + + * Fix: reference error on window within webworkers (#393, @KlausTrainer) + * Docs: fixed README typo (#391, @lurch) + * Docs: added notice about v3 api discussion (@thebigredgeek) + +2.5.1 / 2016-12-20 +================== + + * Fix: babel-core compatibility + +2.5.0 / 2016-12-20 +================== + + * Fix: wrong reference in bower file (@thebigredgeek) + * Fix: webworker compatibility (@thebigredgeek) + * Fix: output formatting issue (#388, @kribblo) + * Fix: babel-loader compatibility (#383, @escwald) + * Misc: removed built asset from repo and publications (@thebigredgeek) + * Misc: moved source files to /src (#378, @yamikuronue) + * Test: added karma integration and replaced babel with browserify for browser tests (#378, @yamikuronue) + * Test: coveralls integration (#378, @yamikuronue) + * Docs: simplified language in the opening paragraph (#373, @yamikuronue) + +2.4.5 / 2016-12-17 +================== + + * Fix: `navigator` undefined in Rhino (#376, @jochenberger) + * Fix: custom log function (#379, @hsiliev) + * Improvement: bit of cleanup + linting fixes (@thebigredgeek) + * Improvement: rm non-maintainted `dist/` dir (#375, @freewil) + * Docs: simplified language in the opening paragraph. (#373, @yamikuronue) + +2.4.4 / 2016-12-14 +================== + + * Fix: work around debug being loaded in preload scripts for electron (#368, @paulcbetts) + +2.4.3 / 2016-12-14 +================== + + * Fix: navigation.userAgent error for react native (#364, @escwald) + +2.4.2 / 2016-12-14 +================== + + * Fix: browser colors (#367, @tootallnate) + * Misc: travis ci integration (@thebigredgeek) + * Misc: added linting and testing boilerplate with sanity check (@thebigredgeek) + +2.4.1 / 2016-12-13 +================== + + * Fix: typo that broke the package (#356) + +2.4.0 / 2016-12-13 +================== + + * Fix: bower.json references unbuilt src entry point (#342, @justmatt) + * Fix: revert "handle regex special characters" (@tootallnate) + * Feature: configurable util.inspect()`options for NodeJS (#327, @tootallnate) + * Feature: %O`(big O) pretty-prints objects (#322, @tootallnate) + * Improvement: allow colors in workers (#335, @botverse) + * Improvement: use same color for same namespace. 
(#338, @lchenay) + +2.3.3 / 2016-11-09 +================== + + * Fix: Catch `JSON.stringify()` errors (#195, Jovan Alleyne) + * Fix: Returning `localStorage` saved values (#331, Levi Thomason) + * Improvement: Don't create an empty object when no `process` (Nathan Rajlich) + +2.3.2 / 2016-11-09 +================== + + * Fix: be super-safe in index.js as well (@TooTallNate) + * Fix: should check whether process exists (Tom Newby) + +2.3.1 / 2016-11-09 +================== + + * Fix: Added electron compatibility (#324, @paulcbetts) + * Improvement: Added performance optimizations (@tootallnate) + * Readme: Corrected PowerShell environment variable example (#252, @gimre) + * Misc: Removed yarn lock file from source control (#321, @fengmk2) + +2.3.0 / 2016-11-07 +================== + + * Fix: Consistent placement of ms diff at end of output (#215, @gorangajic) + * Fix: Escaping of regex special characters in namespace strings (#250, @zacronos) + * Fix: Fixed bug causing crash on react-native (#282, @vkarpov15) + * Feature: Enabled ES6+ compatible import via default export (#212 @bucaran) + * Feature: Added %O formatter to reflect Chrome's console.log capability (#279, @oncletom) + * Package: Update "ms" to 0.7.2 (#315, @DevSide) + * Package: removed superfluous version property from bower.json (#207 @kkirsche) + * Readme: fix USE_COLORS to DEBUG_COLORS + * Readme: Doc fixes for format string sugar (#269, @mlucool) + * Readme: Updated docs for DEBUG_FD and DEBUG_COLORS environment variables (#232, @mattlyons0) + * Readme: doc fixes for PowerShell (#271 #243, @exoticknight @unreadable) + * Readme: better docs for browser support (#224, @matthewmueller) + * Tooling: Added yarn integration for development (#317, @thebigredgeek) + * Misc: Renamed History.md to CHANGELOG.md (@thebigredgeek) + * Misc: Added license file (#226 #274, @CantemoInternal @sdaitzman) + * Misc: Updated contributors (@thebigredgeek) + +2.2.0 / 2015-05-09 +================== + + * package: update "ms" to v0.7.1 (#202, @dougwilson) + * README: add logging to file example (#193, @DanielOchoa) + * README: fixed a typo (#191, @amir-s) + * browser: expose `storage` (#190, @stephenmathieson) + * Makefile: add a `distclean` target (#189, @stephenmathieson) + +2.1.3 / 2015-03-13 +================== + + * Updated stdout/stderr example (#186) + * Updated example/stdout.js to match debug current behaviour + * Renamed example/stderr.js to stdout.js + * Update Readme.md (#184) + * replace high intensity foreground color for bold (#182, #183) + +2.1.2 / 2015-03-01 +================== + + * dist: recompile + * update "ms" to v0.7.0 + * package: update "browserify" to v9.0.3 + * component: fix "ms.js" repo location + * changed bower package name + * updated documentation about using debug in a browser + * fix: security error on safari (#167, #168, @yields) + +2.1.1 / 2014-12-29 +================== + + * browser: use `typeof` to check for `console` existence + * browser: check for `console.log` truthiness (fix IE 8/9) + * browser: add support for Chrome apps + * Readme: added Windows usage remarks + * Add `bower.json` to properly support bower install + +2.1.0 / 2014-10-15 +================== + + * node: implement `DEBUG_FD` env variable support + * package: update "browserify" to v6.1.0 + * package: add "license" field to package.json (#135, @panuhorsmalahti) + +2.0.0 / 2014-09-01 +================== + + * package: update "browserify" to v5.11.0 + * node: use stderr rather than stdout for logging (#29, @stephenmathieson) + +1.0.4 / 2014-07-15 
+================== + + * dist: recompile + * example: remove `console.info()` log usage + * example: add "Content-Type" UTF-8 header to browser example + * browser: place %c marker after the space character + * browser: reset the "content" color via `color: inherit` + * browser: add colors support for Firefox >= v31 + * debug: prefer an instance `log()` function over the global one (#119) + * Readme: update documentation about styled console logs for FF v31 (#116, @wryk) + +1.0.3 / 2014-07-09 +================== + + * Add support for multiple wildcards in namespaces (#122, @seegno) + * browser: fix lint + +1.0.2 / 2014-06-10 +================== + + * browser: update color palette (#113, @gscottolson) + * common: make console logging function configurable (#108, @timoxley) + * node: fix %o colors on old node <= 0.8.x + * Makefile: find node path using shell/which (#109, @timoxley) + +1.0.1 / 2014-06-06 +================== + + * browser: use `removeItem()` to clear localStorage + * browser, node: don't set DEBUG if namespaces is undefined (#107, @leedm777) + * package: add "contributors" section + * node: fix comment typo + * README: list authors + +1.0.0 / 2014-06-04 +================== + + * make ms diff be global, not be scope + * debug: ignore empty strings in enable() + * node: make DEBUG_COLORS able to disable coloring + * *: export the `colors` array + * npmignore: don't publish the `dist` dir + * Makefile: refactor to use browserify + * package: add "browserify" as a dev dependency + * Readme: add Web Inspector Colors section + * node: reset terminal color for the debug content + * node: map "%o" to `util.inspect()` + * browser: map "%j" to `JSON.stringify()` + * debug: add custom "formatters" + * debug: use "ms" module for humanizing the diff + * Readme: add "bash" syntax highlighting + * browser: add Firebug color support + * browser: add colors for WebKit browsers + * node: apply log to `console` + * rewrite: abstract common logic for Node & browsers + * add .jshintrc file + +0.8.1 / 2014-04-14 +================== + + * package: re-add the "component" section + +0.8.0 / 2014-03-30 +================== + + * add `enable()` method for nodejs. Closes #27 + * change from stderr to stdout + * remove unnecessary index.js file + +0.7.4 / 2013-11-13 +================== + + * remove "browserify" key from package.json (fixes something in browserify) + +0.7.3 / 2013-10-30 +================== + + * fix: catch localStorage security error when cookies are blocked (Chrome) + * add debug(err) support. Closes #46 + * add .browser prop to package.json. Closes #42 + +0.7.2 / 2013-02-06 +================== + + * fix package.json + * fix: Mobile Safari (private mode) is broken with debug + * fix: Use unicode to send escape character to shell instead of octal to work with strict mode javascript + +0.7.1 / 2013-02-05 +================== + + * add repository URL to package.json + * add DEBUG_COLORED to force colored output + * add browserify support + * fix component. Closes #24 + +0.7.0 / 2012-05-04 +================== + + * Added .component to package.json + * Added debug.component.js build + +0.6.0 / 2012-03-16 +================== + + * Added support for "-" prefix in DEBUG [Vinay Pulim] + * Added `.enabled` flag to the node version [TooTallNate] + +0.5.0 / 2012-02-02 +================== + + * Added: humanize diffs. Closes #8 + * Added `debug.disable()` to the CS variant + * Removed padding. Closes #10 + * Fixed: persist client-side variant again. 
Closes #9 + +0.4.0 / 2012-02-01 +================== + + * Added browser variant support for older browsers [TooTallNate] + * Added `debug.enable('project:*')` to browser variant [TooTallNate] + * Added padding to diff (moved it to the right) + +0.3.0 / 2012-01-26 +================== + + * Added millisecond diff when isatty, otherwise UTC string + +0.2.0 / 2012-01-22 +================== + + * Added wildcard support + +0.1.0 / 2011-12-02 +================== + + * Added: remove colors unless stderr isatty [TooTallNate] + +0.0.1 / 2010-01-03 +================== + + * Initial release diff --git a/node_modules/nodemon/node_modules/debug/LICENSE b/node_modules/nodemon/node_modules/debug/LICENSE new file mode 100644 index 00000000..658c933d --- /dev/null +++ b/node_modules/nodemon/node_modules/debug/LICENSE @@ -0,0 +1,19 @@ +(The MIT License) + +Copyright (c) 2014 TJ Holowaychuk + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/nodemon/node_modules/debug/README.md b/node_modules/nodemon/node_modules/debug/README.md new file mode 100644 index 00000000..0ee7634d --- /dev/null +++ b/node_modules/nodemon/node_modules/debug/README.md @@ -0,0 +1,437 @@ +# debug +[![Build Status](https://travis-ci.org/visionmedia/debug.svg?branch=master)](https://travis-ci.org/visionmedia/debug) [![Coverage Status](https://coveralls.io/repos/github/visionmedia/debug/badge.svg?branch=master)](https://coveralls.io/github/visionmedia/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny JavaScript debugging utility modelled after Node.js core's debugging +technique. Works in Node.js and web browsers. + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. 
+ +Example [_app.js_](./examples/node/app.js): + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %o', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example [_worker.js_](./examples/node/worker.js): + +```js +var a = require('debug')('worker:a') + , b = require('debug')('worker:b'); + +function work() { + a('doing lots of uninteresting work'); + setTimeout(work, Math.random() * 1000); +} + +work(); + +function workb() { + b('doing some work'); + setTimeout(workb, Math.random() * 2000); +} + +workb(); +``` + +The `DEBUG` environment variable is then used to enable these based on space or +comma-delimited names. + +Here are some examples: + +screen shot 2017-08-08 at 12 53 04 pm +screen shot 2017-08-08 at 12 53 38 pm +screen shot 2017-08-08 at 12 53 25 pm + +#### Windows command prompt notes + +##### CMD + +On Windows the environment variable is set using the `set` command. + +```cmd +set DEBUG=*,-not_this +``` + +Example: + +```cmd +set DEBUG=* & node app.js +``` + +##### PowerShell (VS Code default) + +PowerShell uses different syntax to set environment variables. + +```cmd +$env:DEBUG = "*,-not_this" +``` + +Example: + +```cmd +$env:DEBUG='app';node app.js +``` + +Then, run the program to be debugged as usual. + +npm script example: +```js + "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", +``` + +## Namespace Colors + +Every debug instance has a color generated for it based on its namespace name. +This helps when visually parsing the debug output to identify which debug instance +a debug line belongs to. + +#### Node.js + +In Node.js, colors are enabled when stderr is a TTY. You also _should_ install +the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, +otherwise debug will only use a small handful of basic colors. + + + +#### Web Browser + +Colors are also enabled on "Web Inspectors" that understand the `%c` formatting +option. These are WebKit web inspectors, Firefox ([since version +31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) +and the Firebug plugin for Firefox (any version). + + + + +## Millisecond diff + +When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. + + + +When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below: + + + + +## Conventions + +If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. + +## Wildcards + +The `*` character may be used as a wildcard. 
Suppose for example your library has +debuggers named "connect:bodyParser", "connect:compress", "connect:session", +instead of listing all three with +`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do +`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + +You can also exclude specific debuggers by prefixing them with a "-" character. +For example, `DEBUG=*,-connect:*` would include all debuggers except those +starting with "connect:". + +## Environment Variables + +When running through Node.js, you can set a few environment variables that will +change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + +__Note:__ The environment variables beginning with `DEBUG_` end up being +converted into an Options object that gets used with `%o`/`%O` formatters. +See the Node.js documentation for +[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) +for the complete list. + +## Formatters + +Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. +Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + + +### Custom formatters + +You can add custom formatters by extending the `debug.formatters` object. +For example, if you wanted to add support for rendering a Buffer as hex with +`%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + + +## Browser Support + +You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), +or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), +if you don't want to build it yourself. + +Debug's enable state is currently persisted by `localStorage`. +Consider the situation shown below where you have `worker:a` and `worker:b`, +and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. 
+ +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example [_stdout.js_](./examples/node/stdout.js): + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + +## Extend +You can simply extend debugger +```js +const log = require('debug')('auth'); + +//creates new debug instance with extended namespace +const logSign = log.extend('sign'); +const logLogin = log.extend('login'); + +log('hello'); // auth hello +logSign('hello'); //auth:sign hello +logLogin('hello'); //auth:login hello +``` + +## Set dynamically + +You can also enable debug dynamically by calling the `enable()` method : + +```js +let debug = require('debug'); + +console.log(1, debug.enabled('test')); + +debug.enable('test'); +console.log(2, debug.enabled('test')); + +debug.disable(); +console.log(3, debug.enabled('test')); + +``` + +print : +``` +1 false +2 true +3 false +``` + +Usage : +`enable(namespaces)` +`namespaces` can include modes separated by a colon and wildcards. + +Note that calling `enable()` completely overrides previously set DEBUG variable : + +``` +$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))' +=> false +``` + +## Checking whether a debug target is enabled + +After you've created a debug instance, you can determine whether or not it is +enabled by checking the `enabled` property: + +```javascript +const debug = require('debug')('http'); + +if (debug.enabled) { + // do stuff... +} +``` + +You can also manually toggle this property to force the debug instance to be +enabled or disabled. + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + +## Backers + +Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/nodemon/node_modules/debug/dist/debug.js b/node_modules/nodemon/node_modules/debug/dist/debug.js new file mode 100644 index 00000000..f271e01c --- /dev/null +++ b/node_modules/nodemon/node_modules/debug/dist/debug.js @@ -0,0 +1,886 @@ +"use strict"; + +function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } + +(function (f) { + if ((typeof exports === "undefined" ? "undefined" : _typeof(exports)) === "object" && typeof module !== "undefined") { + module.exports = f(); + } else if (typeof define === "function" && define.amd) { + define([], f); + } else { + var g; + + if (typeof window !== "undefined") { + g = window; + } else if (typeof global !== "undefined") { + g = global; + } else if (typeof self !== "undefined") { + g = self; + } else { + g = this; + } + + g.debug = f(); + } +})(function () { + var define, module, exports; + return function () { + function r(e, n, t) { + function o(i, f) { + if (!n[i]) { + if (!e[i]) { + var c = "function" == typeof require && require; + if (!f && c) return c(i, !0); + if (u) return u(i, !0); + var a = new Error("Cannot find module '" + i + "'"); + throw a.code = "MODULE_NOT_FOUND", a; + } + + var p = n[i] = { + exports: {} + }; + e[i][0].call(p.exports, function (r) { + var n = e[i][1][r]; + return o(n || r); + }, p, p.exports, r, e, n, t); + } + + return n[i].exports; + } + + for (var u = "function" == typeof require && require, i = 0; i < t.length; i++) { + o(t[i]); + } + + return o; + } + + return r; + }()({ + 1: [function (require, module, exports) { + /** + * Helpers. + */ + var s = 1000; + var m = s * 60; + var h = m * 60; + var d = h * 24; + var w = d * 7; + var y = d * 365.25; + /** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + + module.exports = function (val, options) { + options = options || {}; + + var type = _typeof(val); + + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isNaN(val) === false) { + return options.long ? fmtLong(val) : fmtShort(val); + } + + throw new Error('val is not a non-empty string or a valid number. val=' + JSON.stringify(val)); + }; + /** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + + + function parse(str) { + str = String(str); + + if (str.length > 100) { + return; + } + + var match = /^((?:\d+)?\-?\d?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(str); + + if (!match) { + return; + } + + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + + case 'weeks': + case 'week': + case 'w': + return n * w; + + case 'days': + case 'day': + case 'd': + return n * d; + + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + + default: + return undefined; + } + } + /** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + + + function fmtShort(ms) { + var msAbs = Math.abs(ms); + + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + + return ms + 'ms'; + } + /** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + + + function fmtLong(ms) { + var msAbs = Math.abs(ms); + + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + + return ms + ' ms'; + } + /** + * Pluralization helper. + */ + + + function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); + } + }, {}], + 2: [function (require, module, exports) { + // shim for using process in browser + var process = module.exports = {}; // cached from whatever global is present so that test runners that stub it + // don't break things. But we need to wrap it in a try catch in case it is + // wrapped in strict mode code which doesn't define any globals. It's inside a + // function because try/catches deoptimize in certain engines. 
+ + var cachedSetTimeout; + var cachedClearTimeout; + + function defaultSetTimout() { + throw new Error('setTimeout has not been defined'); + } + + function defaultClearTimeout() { + throw new Error('clearTimeout has not been defined'); + } + + (function () { + try { + if (typeof setTimeout === 'function') { + cachedSetTimeout = setTimeout; + } else { + cachedSetTimeout = defaultSetTimout; + } + } catch (e) { + cachedSetTimeout = defaultSetTimout; + } + + try { + if (typeof clearTimeout === 'function') { + cachedClearTimeout = clearTimeout; + } else { + cachedClearTimeout = defaultClearTimeout; + } + } catch (e) { + cachedClearTimeout = defaultClearTimeout; + } + })(); + + function runTimeout(fun) { + if (cachedSetTimeout === setTimeout) { + //normal enviroments in sane situations + return setTimeout(fun, 0); + } // if setTimeout wasn't available but was latter defined + + + if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { + cachedSetTimeout = setTimeout; + return setTimeout(fun, 0); + } + + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedSetTimeout(fun, 0); + } catch (e) { + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedSetTimeout.call(null, fun, 0); + } catch (e) { + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error + return cachedSetTimeout.call(this, fun, 0); + } + } + } + + function runClearTimeout(marker) { + if (cachedClearTimeout === clearTimeout) { + //normal enviroments in sane situations + return clearTimeout(marker); + } // if clearTimeout wasn't available but was latter defined + + + if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { + cachedClearTimeout = clearTimeout; + return clearTimeout(marker); + } + + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedClearTimeout(marker); + } catch (e) { + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedClearTimeout.call(null, marker); + } catch (e) { + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error. + // Some versions of I.E. 
have different rules for clearTimeout vs setTimeout + return cachedClearTimeout.call(this, marker); + } + } + } + + var queue = []; + var draining = false; + var currentQueue; + var queueIndex = -1; + + function cleanUpNextTick() { + if (!draining || !currentQueue) { + return; + } + + draining = false; + + if (currentQueue.length) { + queue = currentQueue.concat(queue); + } else { + queueIndex = -1; + } + + if (queue.length) { + drainQueue(); + } + } + + function drainQueue() { + if (draining) { + return; + } + + var timeout = runTimeout(cleanUpNextTick); + draining = true; + var len = queue.length; + + while (len) { + currentQueue = queue; + queue = []; + + while (++queueIndex < len) { + if (currentQueue) { + currentQueue[queueIndex].run(); + } + } + + queueIndex = -1; + len = queue.length; + } + + currentQueue = null; + draining = false; + runClearTimeout(timeout); + } + + process.nextTick = function (fun) { + var args = new Array(arguments.length - 1); + + if (arguments.length > 1) { + for (var i = 1; i < arguments.length; i++) { + args[i - 1] = arguments[i]; + } + } + + queue.push(new Item(fun, args)); + + if (queue.length === 1 && !draining) { + runTimeout(drainQueue); + } + }; // v8 likes predictible objects + + + function Item(fun, array) { + this.fun = fun; + this.array = array; + } + + Item.prototype.run = function () { + this.fun.apply(null, this.array); + }; + + process.title = 'browser'; + process.browser = true; + process.env = {}; + process.argv = []; + process.version = ''; // empty string to avoid regexp issues + + process.versions = {}; + + function noop() {} + + process.on = noop; + process.addListener = noop; + process.once = noop; + process.off = noop; + process.removeListener = noop; + process.removeAllListeners = noop; + process.emit = noop; + process.prependListener = noop; + process.prependOnceListener = noop; + + process.listeners = function (name) { + return []; + }; + + process.binding = function (name) { + throw new Error('process.binding is not supported'); + }; + + process.cwd = function () { + return '/'; + }; + + process.chdir = function (dir) { + throw new Error('process.chdir is not supported'); + }; + + process.umask = function () { + return 0; + }; + }, {}], + 3: [function (require, module, exports) { + /** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + Object.keys(env).forEach(function (key) { + createDebug[key] = env[key]; + }); + /** + * Active `debug` instances. + */ + + createDebug.instances = []; + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". 
+ */ + + createDebug.formatters = {}; + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + + function selectColor(namespace) { + var hash = 0; + + for (var i = 0; i < namespace.length; i++) { + hash = (hash << 5) - hash + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + + createDebug.selectColor = selectColor; + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + + function createDebug(namespace) { + var prevTime; + + function debug() { + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + // Disabled? + if (!debug.enabled) { + return; + } + + var self = debug; // Set `diff` timestamp + + var curr = Number(new Date()); + var ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } // Apply any `formatters` transformations + + + var index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, function (match, format) { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return match; + } + + index++; + var formatter = createDebug.formatters[format]; + + if (typeof formatter === 'function') { + var val = args[index]; + match = formatter.call(self, val); // Now we need to remove `args[index]` since it's inlined in the `format` + + args.splice(index, 1); + index--; + } + + return match; + }); // Apply env-specific formatting (colors, etc.) + + createDebug.formatArgs.call(self, args); + var logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.enabled = createDebug.enabled(namespace); + debug.useColors = createDebug.useColors(); + debug.color = selectColor(namespace); + debug.destroy = destroy; + debug.extend = extend; // Debug.formatArgs = formatArgs; + // debug.rawLog = rawLog; + // env-specific initialization logic for debug instances + + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + createDebug.instances.push(debug); + return debug; + } + + function destroy() { + var index = createDebug.instances.indexOf(this); + + if (index !== -1) { + createDebug.instances.splice(index, 1); + return true; + } + + return false; + } + + function extend(namespace, delimiter) { + return createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + } + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + + + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.names = []; + createDebug.skips = []; + var i; + var split = (typeof namespaces === 'string' ? 
namespaces : '').split(/[\s,]+/); + var len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.substr(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + + for (i = 0; i < createDebug.instances.length; i++) { + var instance = createDebug.instances[i]; + instance.enabled = createDebug.enabled(instance.namespace); + } + } + /** + * Disable debug output. + * + * @api public + */ + + + function disable() { + createDebug.enable(''); + } + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + + + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + var i; + var len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + + + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + + return val; + } + + createDebug.enable(createDebug.load()); + return createDebug; + } + + module.exports = setup; + }, { + "ms": 1 + }], + 4: [function (require, module, exports) { + (function (process) { + /* eslint-env browser */ + + /** + * This is the web browser implementation of `debug()`. + */ + exports.log = log; + exports.formatArgs = formatArgs; + exports.save = save; + exports.load = load; + exports.useColors = useColors; + exports.storage = localstorage(); + /** + * Colors. + */ + + exports.colors = ['#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF', '#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99', '#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF', '#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33', '#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF', '#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF', '#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033', '#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333', '#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633', '#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033', '#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333', '#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633', '#FF9900', '#FF9933', '#FFCC00', '#FFCC33']; + /** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + // eslint-disable-next-line complexity + + function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } // Internet Explorer and Edge do not support colors. 
+ + + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + + + return typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 + typeof window !== 'undefined' && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker + typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); + } + /** + * Colorize log arguments if enabled. + * + * @api public + */ + + + function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + this.namespace + (this.useColors ? ' %c' : ' ') + args[0] + (this.useColors ? '%c ' : ' ') + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + var c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + + var index = 0; + var lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, function (match) { + if (match === '%%') { + return; + } + + index++; + + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + args.splice(lastC, 0, c); + } + /** + * Invokes `console.log()` when available. + * No-op when `console.log` is not a "function". + * + * @api public + */ + + + function log() { + var _console; + + // This hackery is required for IE8/9, where + // the `console.log` function doesn't have 'apply' + return (typeof console === "undefined" ? "undefined" : _typeof(console)) === 'object' && console.log && (_console = console).log.apply(_console, arguments); + } + /** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ + + + function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) {// Swallow + // XXX (@Qix-) should we be logging these? + } + } + /** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + + + function load() { + var r; + + try { + r = exports.storage.getItem('debug'); + } catch (error) {} // Swallow + // XXX (@Qix-) should we be logging these? + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + + + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; + } + /** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. 
+ * + * @return {LocalStorage} + * @api private + */ + + + function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) {// Swallow + // XXX (@Qix-) should we be logging these? + } + } + + module.exports = require('./common')(exports); + var formatters = module.exports.formatters; + /** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + + formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } + }; + }).call(this, require('_process')); + }, { + "./common": 3, + "_process": 2 + }] + }, {}, [4])(4); +}); + diff --git a/node_modules/nodemon/node_modules/debug/node.js b/node_modules/nodemon/node_modules/debug/node.js new file mode 100644 index 00000000..7fc36fe6 --- /dev/null +++ b/node_modules/nodemon/node_modules/debug/node.js @@ -0,0 +1 @@ +module.exports = require('./src/node'); diff --git a/node_modules/nodemon/node_modules/debug/package.json b/node_modules/nodemon/node_modules/debug/package.json new file mode 100644 index 00000000..3650bb0b --- /dev/null +++ b/node_modules/nodemon/node_modules/debug/package.json @@ -0,0 +1,51 @@ +{ + "name": "debug", + "version": "3.2.6", + "repository": { + "type": "git", + "url": "git://github.com/visionmedia/debug.git" + }, + "description": "small debugging utility", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "node.js", + "dist/debug.js", + "LICENSE", + "README.md" + ], + "author": "TJ Holowaychuk ", + "contributors": [ + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + }, + "devDependencies": { + "@babel/cli": "^7.0.0", + "@babel/core": "^7.0.0", + "@babel/preset-env": "^7.0.0", + "browserify": "14.4.0", + "chai": "^3.5.0", + "concurrently": "^3.1.0", + "coveralls": "^3.0.2", + "istanbul": "^0.4.5", + "karma": "^3.0.0", + "karma-chai": "^0.1.0", + "karma-mocha": "^1.3.0", + "karma-phantomjs-launcher": "^1.0.2", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "rimraf": "^2.5.4", + "xo": "^0.23.0" + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "unpkg": "./dist/debug.js" +} diff --git a/node_modules/nodemon/node_modules/debug/src/browser.js b/node_modules/nodemon/node_modules/debug/src/browser.js new file mode 100644 index 00000000..c924b0ac --- /dev/null +++ b/node_modules/nodemon/node_modules/debug/src/browser.js @@ -0,0 +1,180 @@ +"use strict"; + +function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } + +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +/** + * Colors. 
+ */ + +exports.colors = ['#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF', '#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99', '#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF', '#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33', '#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF', '#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF', '#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033', '#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333', '#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633', '#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033', '#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333', '#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633', '#FF9900', '#FF9933', '#FFCC00', '#FFCC33']; +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ +// eslint-disable-next-line complexity + +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } // Internet Explorer and Edge do not support colors. + + + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + + + return typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 + typeof window !== 'undefined' && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker + typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); +} +/** + * Colorize log arguments if enabled. + * + * @api public + */ + + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + this.namespace + (this.useColors ? ' %c' : ' ') + args[0] + (this.useColors ? 
'%c ' : ' ') + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + var c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + + var index = 0; + var lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, function (match) { + if (match === '%%') { + return; + } + + index++; + + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + args.splice(lastC, 0, c); +} +/** + * Invokes `console.log()` when available. + * No-op when `console.log` is not a "function". + * + * @api public + */ + + +function log() { + var _console; + + // This hackery is required for IE8/9, where + // the `console.log` function doesn't have 'apply' + return (typeof console === "undefined" ? "undefined" : _typeof(console)) === 'object' && console.log && (_console = console).log.apply(_console, arguments); +} +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ + + +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) {// Swallow + // XXX (@Qix-) should we be logging these? + } +} +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + + +function load() { + var r; + + try { + r = exports.storage.getItem('debug'); + } catch (error) {} // Swallow + // XXX (@Qix-) should we be logging these? + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + + + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) {// Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = require('./common')(exports); +var formatters = module.exports.formatters; +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; + diff --git a/node_modules/nodemon/node_modules/debug/src/common.js b/node_modules/nodemon/node_modules/debug/src/common.js new file mode 100644 index 00000000..e0de3fb5 --- /dev/null +++ b/node_modules/nodemon/node_modules/debug/src/common.js @@ -0,0 +1,249 @@ +"use strict"; + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. 
+ */ +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + Object.keys(env).forEach(function (key) { + createDebug[key] = env[key]; + }); + /** + * Active `debug` instances. + */ + + createDebug.instances = []; + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + + createDebug.formatters = {}; + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + + function selectColor(namespace) { + var hash = 0; + + for (var i = 0; i < namespace.length; i++) { + hash = (hash << 5) - hash + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + + createDebug.selectColor = selectColor; + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + + function createDebug(namespace) { + var prevTime; + + function debug() { + // Disabled? + if (!debug.enabled) { + return; + } + + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + var self = debug; // Set `diff` timestamp + + var curr = Number(new Date()); + var ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } // Apply any `formatters` transformations + + + var index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, function (match, format) { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return match; + } + + index++; + var formatter = createDebug.formatters[format]; + + if (typeof formatter === 'function') { + var val = args[index]; + match = formatter.call(self, val); // Now we need to remove `args[index]` since it's inlined in the `format` + + args.splice(index, 1); + index--; + } + + return match; + }); // Apply env-specific formatting (colors, etc.) 
+ + createDebug.formatArgs.call(self, args); + var logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.enabled = createDebug.enabled(namespace); + debug.useColors = createDebug.useColors(); + debug.color = selectColor(namespace); + debug.destroy = destroy; + debug.extend = extend; // Debug.formatArgs = formatArgs; + // debug.rawLog = rawLog; + // env-specific initialization logic for debug instances + + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + createDebug.instances.push(debug); + return debug; + } + + function destroy() { + var index = createDebug.instances.indexOf(this); + + if (index !== -1) { + createDebug.instances.splice(index, 1); + return true; + } + + return false; + } + + function extend(namespace, delimiter) { + return createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + } + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + + + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.names = []; + createDebug.skips = []; + var i; + var split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + var len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.substr(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + + for (i = 0; i < createDebug.instances.length; i++) { + var instance = createDebug.instances[i]; + instance.enabled = createDebug.enabled(instance.namespace); + } + } + /** + * Disable debug output. + * + * @api public + */ + + + function disable() { + createDebug.enable(''); + } + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + + + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + var i; + var len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + + + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + + return val; + } + + createDebug.enable(createDebug.load()); + return createDebug; +} + +module.exports = setup; + diff --git a/node_modules/nodemon/node_modules/debug/src/index.js b/node_modules/nodemon/node_modules/debug/src/index.js new file mode 100644 index 00000000..02173159 --- /dev/null +++ b/node_modules/nodemon/node_modules/debug/src/index.js @@ -0,0 +1,12 @@ +"use strict"; + +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. 
+ */ +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} + diff --git a/node_modules/nodemon/node_modules/debug/src/node.js b/node_modules/nodemon/node_modules/debug/src/node.js new file mode 100644 index 00000000..dbbb5f10 --- /dev/null +++ b/node_modules/nodemon/node_modules/debug/src/node.js @@ -0,0 +1,174 @@ +"use strict"; + +/** + * Module dependencies. + */ +var tty = require('tty'); + +var util = require('util'); +/** + * This is the Node.js implementation of `debug()`. + */ + + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + var supportsColor = require('supports-color'); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68, 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134, 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 214, 215, 220, 221]; + } +} catch (error) {} // Swallow - we only care if `supports-color` is available; it doesn't have to be. + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + + +exports.inspectOpts = Object.keys(process.env).filter(function (key) { + return /^debug_/i.test(key); +}).reduce(function (obj, key) { + // Camel-case + var prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, function (_, k) { + return k.toUpperCase(); + }); // Coerce string value into JS value + + var val = process.env[key]; + + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd); +} +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + + +function formatArgs(args) { + var name = this.namespace, + useColors = this.useColors; + + if (useColors) { + var c = this.color; + var colorCode = "\x1B[3" + (c < 8 ? c : '8;5;' + c); + var prefix = " ".concat(colorCode, ";1m").concat(name, " \x1B[0m"); + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + "\x1B[0m"); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + + return new Date().toISOString() + ' '; +} +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + + +function log() { + return process.stderr.write(util.format.apply(util, arguments) + '\n'); +} +/** + * Save `namespaces`. 
+ * + * @param {String} namespaces + * @api private + */ + + +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + + +function load() { + return process.env.DEBUG; +} +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + + +function init(debug) { + debug.inspectOpts = {}; + var keys = Object.keys(exports.inspectOpts); + + for (var i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = require('./common')(exports); +var formatters = module.exports.formatters; +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts).replace(/\s*\n\s*/g, ' '); +}; +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + diff --git a/node_modules/nodemon/node_modules/ms/index.js b/node_modules/nodemon/node_modules/ms/index.js new file mode 100644 index 00000000..c4498bcc --- /dev/null +++ b/node_modules/nodemon/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/node_modules/nodemon/node_modules/ms/license.md b/node_modules/nodemon/node_modules/ms/license.md new file mode 100644 index 00000000..69b61253 --- /dev/null +++ b/node_modules/nodemon/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
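For a quick sense of how the helpers above behave, a few sample calls and the values they should yield (a sketch assuming the module is loaded as `ms`; the inputs are illustrative, and the results follow from `parse`, `fmtShort`, `fmtLong` and `plural` as written):

```js
const ms = require('ms');

ms('2 hrs');                // 7200000 — parse() matches "hrs" and returns 2 * 60 * 60 * 1000
ms('bogus');                // undefined — parse() finds no unit match and returns nothing (no throw)
ms(90000);                  // "2m" — fmtShort() rounds 1.5 minutes up
ms(90000, { long: true });  // "2 minutes" — plural() pluralizes because 90000 >= 60000 * 1.5
ms(true);                   // throws — neither a non-empty string nor a finite number
```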
diff --git a/node_modules/nodemon/node_modules/ms/package.json b/node_modules/nodemon/node_modules/ms/package.json new file mode 100644 index 00000000..eea666e1 --- /dev/null +++ b/node_modules/nodemon/node_modules/ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "ms", + "version": "2.1.2", + "description": "Tiny millisecond conversion utility", + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.12.1", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1" + } +} diff --git a/node_modules/nodemon/node_modules/ms/readme.md b/node_modules/nodemon/node_modules/ms/readme.md new file mode 100644 index 00000000..9a1996b1 --- /dev/null +++ b/node_modules/nodemon/node_modules/ms/readme.md @@ -0,0 +1,60 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/zeit) + +Use this package to easily convert various time formats to milliseconds. + +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! 
+ +As always, you can run the tests using: `npm test` diff --git a/node_modules/nodemon/package.json b/node_modules/nodemon/package.json new file mode 100644 index 00000000..ca9cb1dc --- /dev/null +++ b/node_modules/nodemon/package.json @@ -0,0 +1 @@ +{"name":"nodemon","homepage":"http://nodemon.io","author":{"name":"Remy Sharp","url":"http://github.com/remy"},"bin":{"nodemon":"./bin/nodemon.js"},"engines":{"node":">=4"},"repository":{"type":"git","url":"https://github.com/remy/nodemon.git"},"description":"Simple monitor script for use during development of a node.js app.","keywords":["monitor","development","restart","autoload","reload","terminal"],"license":"MIT","main":"./lib/nodemon","scripts":{"commitmsg":"commitlint -e","coverage":"istanbul cover _mocha -- --timeout 30000 --ui bdd --reporter list test/**/*.test.js","lint":"jscs lib/**/*.js -v",":spec":"node_modules/.bin/mocha --timeout 30000 --ui bdd test/**/*.test.js","test":"npm run lint && npm run spec","spec":"for FILE in test/**/*.test.js; do echo $FILE; TEST=1 mocha --exit --timeout 30000 $FILE; if [ $? -ne 0 ]; then exit 1; fi; sleep 1; done","postspec":"npm run clean","clean":"rm -rf test/fixtures/test*.js test/fixtures/test*.md","web":"node web","semantic-release":"semantic-release pre && npm publish && semantic-release post","prepush":"npm run lint","killall":"ps auxww | grep node | grep -v grep | awk '{ print $2 }' | xargs kill -9","postinstall":"node bin/postinstall || exit 0"},"devDependencies":{"@commitlint/cli":"^3.1.3","@commitlint/config-angular":"^3.1.1","async":"1.4.2","coffee-script":"~1.7.1","husky":"^0.14.3","istanbul":"^0.4.5","jscs":"^3.0.7","mocha":"^2.3.3","proxyquire":"^1.8.0","semantic-release":"^8.2.0","should":"~4.0.0"},"dependencies":{"chokidar":"^2.1.5","debug":"^3.1.0","ignore-by-default":"^1.0.1","minimatch":"^3.0.4","pstree.remy":"^1.1.6","semver":"^5.5.0","supports-color":"^5.2.0","touch":"^3.1.0","undefsafe":"^2.0.2","update-notifier":"^2.5.0"},"version":"1.19.1"} diff --git a/node_modules/nopt/.gitignore b/node_modules/nopt/.gitignore new file mode 100644 index 00000000..e69de29b diff --git a/node_modules/nopt/LICENSE b/node_modules/nopt/LICENSE new file mode 100644 index 00000000..05a40109 --- /dev/null +++ b/node_modules/nopt/LICENSE @@ -0,0 +1,23 @@ +Copyright 2009, 2010, 2011 Isaac Z. Schlueter. +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/nopt/README.md b/node_modules/nopt/README.md new file mode 100644 index 00000000..eeddfd4f --- /dev/null +++ b/node_modules/nopt/README.md @@ -0,0 +1,208 @@ +If you want to write an option parser, and have it be good, there are +two ways to do it. The Right Way, and the Wrong Way. + +The Wrong Way is to sit down and write an option parser. We've all done +that. + +The Right Way is to write some complex configurable program with so many +options that you go half-insane just trying to manage them all, and put +it off with duct-tape solutions until you see exactly to the core of the +problem, and finally snap and write an awesome option parser. + +If you want to write an option parser, don't write an option parser. +Write a package manager, or a source control system, or a service +restarter, or an operating system. You probably won't end up with a +good one of those, but if you don't give up, and you are relentless and +diligent enough in your procrastination, you may just end up with a very +nice option parser. + +## USAGE + + // my-program.js + var nopt = require("nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + , "many" : [String, Array] + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag"] + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) + console.log(parsed) + +This would give you support for any of the following: + +```bash +$ node my-program.js --foo "blerp" --no-flag +{ "foo" : "blerp", "flag" : false } + +$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag +{ bar: 7, foo: "Mr. Hand", flag: true } + +$ node my-program.js --foo "blerp" -f -----p +{ foo: "blerp", flag: true, pick: true } + +$ node my-program.js -fp --foofoo +{ foo: "Mr. Foo", flag: true, pick: true } + +$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. +{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } + +$ node my-program.js --blatzk 1000 -fp # unknown opts are ok. +{ blatzk: 1000, flag: true, pick: true } + +$ node my-program.js --blatzk true -fp # but they need a value +{ blatzk: true, flag: true, pick: true } + +$ node my-program.js --no-blatzk -fp # unless they start with "no-" +{ blatzk: false, flag: true, pick: true } + +$ node my-program.js --baz b/a/z # known paths are resolved. +{ baz: "/Users/isaacs/b/a/z" } + +# if Array is one of the types, then it can take many +# values, and will always be an array. The other types provided +# specify what types are allowed in the list. + +$ node my-program.js --many 1 --many null --many foo +{ many: ["1", "null", "foo"] } + +$ node my-program.js --many foo +{ many: ["foo"] } +``` + +Read the tests at the bottom of `lib/nopt.js` for more examples of +what this puppy can do. + +## Types + +The following types are supported, and defined on `nopt.typeDefs` + +* String: A normal string. No parsing is done. +* path: A file system path. Gets resolved against cwd if not absolute. +* url: A url. If it doesn't parse, it isn't accepted. +* Number: Must be numeric. +* Date: Must parse as a date. If it does, and `Date` is one of the options, + then it will return a Date object, not a string. 
+* Boolean: Must be either `true` or `false`. If an option is a boolean, + then it does not need a value, and its presence will imply `true` as + the value. To negate boolean flags, do `--no-whatever` or `--whatever + false` +* NaN: Means that the option is strictly not allowed. Any value will + fail. +* Stream: An object matching the "Stream" class in node. Valuable + for use when validating programmatically. (npm uses this to let you + supply any WriteStream on the `outfd` and `logfd` config options.) +* Array: If `Array` is specified as one of the types, then the value + will be parsed as a list of options. This means that multiple values + can be specified, and that the value will always be an array. + +If a type is an array of values not on this list, then those are +considered valid values. For instance, in the example above, the +`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, +and any other value will be rejected. + +When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be +interpreted as their JavaScript equivalents, and numeric values will be +interpreted as a number. + +You can also mix types and values, or multiple types, in a list. For +instance `{ blah: [Number, null] }` would allow a value to be set to +either a Number or null. + +To define a new type, add it to `nopt.typeDefs`. Each item in that +hash is an object with a `type` member and a `validate` method. The +`type` member is an object that matches what goes in the type list. The +`validate` method is a function that gets called with `validate(data, +key, val)`. Validate methods should assign `data[key]` to the valid +value of `val` if it can be handled properly, or return boolean +`false` if it cannot. + +You can also call `nopt.clean(data, types, typeDefs)` to clean up a +config object and remove its invalid properties. + +## Error Handling + +By default, nopt outputs a warning to standard error when invalid +options are found. You can change this behavior by assigning a method +to `nopt.invalidHandler`. This method will be called with +the offending `nopt.invalidHandler(key, val, types)`. + +If no `nopt.invalidHandler` is assigned, then it will console.error +its whining. If it is assigned to boolean `false` then the warning is +suppressed. + +## Abbreviations + +Yes, they are supported. If you define options like this: + +```javascript +{ "foolhardyelephants" : Boolean +, "pileofmonkeys" : Boolean } +``` + +Then this will work: + +```bash +node program.js --foolhar --pil +node program.js --no-f --pileofmon +# etc. +``` + +## Shorthands + +Shorthands are a hash of shorter option names to a snippet of args that +they expand to. + +If multiple one-character shorthands are all combined, and the +combination does not unambiguously match any other option or shorthand, +then they will be broken up into their constituent parts. For example: + +```json +{ "s" : ["--loglevel", "silent"] +, "g" : "--global" +, "f" : "--force" +, "p" : "--parseable" +, "l" : "--long" +} +``` + +```bash +npm ls -sgflp +# just like doing this: +npm ls --loglevel silent --global --force --long --parseable +``` + +## The Rest of the args + +The config object returned by nopt is given a special member called +`argv`, which is an object with the following fields: + +* `remain`: The remaining args after all the parsing has occurred. +* `original`: The args as they originally appeared. +* `cooked`: The args after flags and shorthands are expanded. 
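Tying together the `nopt.typeDefs` and `nopt.invalidHandler` hooks described above, a minimal sketch (the `Even` marker object, the `workers` option and the sample argv are invented for illustration):

```js
var nopt = require("nopt");

// Marker object used in type lists; the name is arbitrary.
var Even = { even: true };

nopt.typeDefs.Even = {
  type: Even,
  validate: function (data, key, val) {
    var n = Number(val);
    if (isNaN(n) || n % 2 !== 0) return false; // reject -> value is dropped
    data[key] = n;                             // accept by assigning the coerced value
  }
};

// Called instead of the default warning when a value fails validation.
nopt.invalidHandler = function (key, val, types) {
  console.error("ignoring invalid value for --" + key + ":", val);
};

var parsed = nopt({ workers: Even }, {}, ["--workers", "3"], 0);
console.log(parsed.workers);     // undefined -- 3 is odd, so it was rejected
console.log(parsed.argv.remain); // []

parsed = nopt({ workers: Even }, {}, ["--workers", "4"], 0);
console.log(parsed.workers);     // 4
```

Returning `false` from `validate` (or leaving `data[key]` unset) is what routes the offending value to `invalidHandler` and removes it from the parsed result.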
+ +## Slicing + +Node programs are called with more or less the exact argv as it appears +in C land, after the v8 and node-specific options have been plucked off. +As such, `argv[0]` is always `node` and `argv[1]` is always the +JavaScript program being run. + +That's usually not very useful to you. So they're sliced off by +default. If you want them, then you can pass in `0` as the last +argument, or any other number that you'd like to slice off the start of +the list. diff --git a/node_modules/nopt/bin/nopt.js b/node_modules/nopt/bin/nopt.js new file mode 100755 index 00000000..df90c729 --- /dev/null +++ b/node_modules/nopt/bin/nopt.js @@ -0,0 +1,44 @@ +#!/usr/bin/env node +var nopt = require("../lib/nopt") + , types = { num: Number + , bool: Boolean + , help: Boolean + , list: Array + , "num-list": [Number, Array] + , "str-list": [String, Array] + , "bool-list": [Boolean, Array] + , str: String } + , shorthands = { s: [ "--str", "astring" ] + , b: [ "--bool" ] + , nb: [ "--no-bool" ] + , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ] + , "?": ["--help"] + , h: ["--help"] + , H: ["--help"] + , n: [ "--num", "125" ] } + , parsed = nopt( types + , shorthands + , process.argv + , 2 ) + +console.log("parsed", parsed) + +if (parsed.help) { + console.log("") + console.log("nopt cli tester") + console.log("") + console.log("types") + console.log(Object.keys(types).map(function M (t) { + var type = types[t] + if (Array.isArray(type)) { + return [t, type.map(function (type) { return type.name })] + } + return [t, type && type.name] + }).reduce(function (s, i) { + s[i[0]] = i[1] + return s + }, {})) + console.log("") + console.log("shorthands") + console.log(shorthands) +} diff --git a/node_modules/nopt/examples/my-program.js b/node_modules/nopt/examples/my-program.js new file mode 100755 index 00000000..142447e1 --- /dev/null +++ b/node_modules/nopt/examples/my-program.js @@ -0,0 +1,30 @@ +#!/usr/bin/env node + +//process.env.DEBUG_NOPT = 1 + +// my-program.js +var nopt = require("../lib/nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag", "true"] + , "g" : ["--flag"] + , "s" : "--flag" + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) + +console.log("parsed =\n"+ require("util").inspect(parsed)) diff --git a/node_modules/nopt/lib/nopt.js b/node_modules/nopt/lib/nopt.js new file mode 100644 index 00000000..ff802daf --- /dev/null +++ b/node_modules/nopt/lib/nopt.js @@ -0,0 +1,552 @@ +// info about each config option. + +var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG + ? 
function () { console.error.apply(console, arguments) } + : function () {} + +var url = require("url") + , path = require("path") + , Stream = require("stream").Stream + , abbrev = require("abbrev") + +module.exports = exports = nopt +exports.clean = clean + +exports.typeDefs = + { String : { type: String, validate: validateString } + , Boolean : { type: Boolean, validate: validateBoolean } + , url : { type: url, validate: validateUrl } + , Number : { type: Number, validate: validateNumber } + , path : { type: path, validate: validatePath } + , Stream : { type: Stream, validate: validateStream } + , Date : { type: Date, validate: validateDate } + } + +function nopt (types, shorthands, args, slice) { + args = args || process.argv + types = types || {} + shorthands = shorthands || {} + if (typeof slice !== "number") slice = 2 + + debug(types, shorthands, args, slice) + + args = args.slice(slice) + var data = {} + , key + , remain = [] + , cooked = args + , original = args.slice(0) + + parse(args, data, remain, types, shorthands) + // now data is full + clean(data, types, exports.typeDefs) + data.argv = {remain:remain,cooked:cooked,original:original} + data.argv.toString = function () { + return this.original.map(JSON.stringify).join(" ") + } + return data +} + +function clean (data, types, typeDefs) { + typeDefs = typeDefs || exports.typeDefs + var remove = {} + , typeDefault = [false, true, null, String, Number] + + Object.keys(data).forEach(function (k) { + if (k === "argv") return + var val = data[k] + , isArray = Array.isArray(val) + , type = types[k] + if (!isArray) val = [val] + if (!type) type = typeDefault + if (type === Array) type = typeDefault.concat(Array) + if (!Array.isArray(type)) type = [type] + + debug("val=%j", val) + debug("types=", type) + val = val.map(function (val) { + // if it's an unknown value, then parse false/true/null/numbers/dates + if (typeof val === "string") { + debug("string %j", val) + val = val.trim() + if ((val === "null" && ~type.indexOf(null)) + || (val === "true" && + (~type.indexOf(true) || ~type.indexOf(Boolean))) + || (val === "false" && + (~type.indexOf(false) || ~type.indexOf(Boolean)))) { + val = JSON.parse(val) + debug("jsonable %j", val) + } else if (~type.indexOf(Number) && !isNaN(val)) { + debug("convert to number", val) + val = +val + } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { + debug("convert to date", val) + val = new Date(val) + } + } + + if (!types.hasOwnProperty(k)) { + return val + } + + // allow `--no-blah` to set 'blah' to null if null is allowed + if (val === false && ~type.indexOf(null) && + !(~type.indexOf(false) || ~type.indexOf(Boolean))) { + val = null + } + + var d = {} + d[k] = val + debug("prevalidated val", d, val, types[k]) + if (!validate(d, k, val, types[k], typeDefs)) { + if (exports.invalidHandler) { + exports.invalidHandler(k, val, types[k], data) + } else if (exports.invalidHandler !== false) { + debug("invalid: "+k+"="+val, types[k]) + } + return remove + } + debug("validated val", d, val, types[k]) + return d[k] + }).filter(function (val) { return val !== remove }) + + if (!val.length) delete data[k] + else if (isArray) { + debug(isArray, data[k], val) + data[k] = val + } else data[k] = val[0] + + debug("k=%s val=%j", k, val, data[k]) + }) +} + +function validateString (data, k, val) { + data[k] = String(val) +} + +function validatePath (data, k, val) { + data[k] = path.resolve(String(val)) + return true +} + +function validateNumber (data, k, val) { + debug("validate Number %j %j %j", k, val, 
isNaN(val)) + if (isNaN(val)) return false + data[k] = +val +} + +function validateDate (data, k, val) { + debug("validate Date %j %j %j", k, val, Date.parse(val)) + var s = Date.parse(val) + if (isNaN(s)) return false + data[k] = new Date(val) +} + +function validateBoolean (data, k, val) { + if (val instanceof Boolean) val = val.valueOf() + else if (typeof val === "string") { + if (!isNaN(val)) val = !!(+val) + else if (val === "null" || val === "false") val = false + else val = true + } else val = !!val + data[k] = val +} + +function validateUrl (data, k, val) { + val = url.parse(String(val)) + if (!val.host) return false + data[k] = val.href +} + +function validateStream (data, k, val) { + if (!(val instanceof Stream)) return false + data[k] = val +} + +function validate (data, k, val, type, typeDefs) { + // arrays are lists of types. + if (Array.isArray(type)) { + for (var i = 0, l = type.length; i < l; i ++) { + if (type[i] === Array) continue + if (validate(data, k, val, type[i], typeDefs)) return true + } + delete data[k] + return false + } + + // an array of anything? + if (type === Array) return true + + // NaN is poisonous. Means that something is not allowed. + if (type !== type) { + debug("Poison NaN", k, val, type) + delete data[k] + return false + } + + // explicit list of values + if (val === type) { + debug("Explicitly allowed %j", val) + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + return true + } + + // now go through the list of typeDefs, validate against each one. + var ok = false + , types = Object.keys(typeDefs) + for (var i = 0, l = types.length; i < l; i ++) { + debug("test type %j %j %j", k, val, types[i]) + var t = typeDefs[types[i]] + if (t && type === t.type) { + var d = {} + ok = false !== t.validate(d, k, val) + val = d[k] + if (ok) { + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + break + } + } + } + debug("OK? %j (%j %j %j)", ok, k, val, types[i]) + + if (!ok) delete data[k] + return ok +} + +function parse (args, data, remain, types, shorthands) { + debug("parse", args, data, remain) + + var key = null + , abbrevs = abbrev(Object.keys(types)) + , shortAbbr = abbrev(Object.keys(shorthands)) + + for (var i = 0; i < args.length; i ++) { + var arg = args[i] + debug("arg", arg) + + if (arg.match(/^-{2,}$/)) { + // done with keys. + // the rest are args. + remain.push.apply(remain, args.slice(i + 1)) + args[i] = "--" + break + } + if (arg.charAt(0) === "-") { + if (arg.indexOf("=") !== -1) { + var v = arg.split("=") + arg = v.shift() + v = v.join("=") + args.splice.apply(args, [i, 1].concat([arg, v])) + } + // see if it's a shorthand + // if so, splice and back up to re-parse it. 
+ var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) + debug("arg=%j shRes=%j", arg, shRes) + if (shRes) { + debug(arg, shRes) + args.splice.apply(args, [i, 1].concat(shRes)) + if (arg !== shRes[0]) { + i -- + continue + } + } + arg = arg.replace(/^-+/, "") + var no = false + while (arg.toLowerCase().indexOf("no-") === 0) { + no = !no + arg = arg.substr(3) + } + + if (abbrevs[arg]) arg = abbrevs[arg] + + var isArray = types[arg] === Array || + Array.isArray(types[arg]) && types[arg].indexOf(Array) !== -1 + + var val + , la = args[i + 1] + + var isBool = no || + types[arg] === Boolean || + Array.isArray(types[arg]) && types[arg].indexOf(Boolean) !== -1 || + (la === "false" && + (types[arg] === null || + Array.isArray(types[arg]) && ~types[arg].indexOf(null))) + + if (isBool) { + // just set and move along + val = !no + // however, also support --bool true or --bool false + if (la === "true" || la === "false") { + val = JSON.parse(la) + la = null + if (no) val = !val + i ++ + } + + // also support "foo":[Boolean, "bar"] and "--foo bar" + if (Array.isArray(types[arg]) && la) { + if (~types[arg].indexOf(la)) { + // an explicit type + val = la + i ++ + } else if ( la === "null" && ~types[arg].indexOf(null) ) { + // null allowed + val = null + i ++ + } else if ( !la.match(/^-{2,}[^-]/) && + !isNaN(la) && + ~types[arg].indexOf(Number) ) { + // number + val = +la + i ++ + } else if ( !la.match(/^-[^-]/) && ~types[arg].indexOf(String) ) { + // string + val = la + i ++ + } + } + + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + continue + } + + if (la && la.match(/^-{2,}$/)) { + la = undefined + i -- + } + + val = la === undefined ? true : la + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + i ++ + continue + } + remain.push(arg) + } +} + +function resolveShort (arg, shorthands, shortAbbr, abbrevs) { + // handle single-char shorthands glommed together, like + // npm ls -glp, but only if there is one dash, and only if + // all of the chars are single-char shorthands, and it's + // not a match to some other abbrev. + arg = arg.replace(/^-+/, '') + if (abbrevs[arg] && !shorthands[arg]) { + return null + } + if (shortAbbr[arg]) { + arg = shortAbbr[arg] + } else { + var singles = shorthands.___singles + if (!singles) { + singles = Object.keys(shorthands).filter(function (s) { + return s.length === 1 + }).reduce(function (l,r) { l[r] = true ; return l }, {}) + shorthands.___singles = singles + } + var chrs = arg.split("").filter(function (c) { + return singles[c] + }) + if (chrs.join("") === arg) return chrs.map(function (c) { + return shorthands[c] + }).reduce(function (l, r) { + return l.concat(r) + }, []) + } + + if (shorthands[arg] && !Array.isArray(shorthands[arg])) { + shorthands[arg] = shorthands[arg].split(/\s+/) + } + return shorthands[arg] +} + +if (module === require.main) { +var assert = require("assert") + , util = require("util") + + , shorthands = + { s : ["--loglevel", "silent"] + , d : ["--loglevel", "info"] + , dd : ["--loglevel", "verbose"] + , ddd : ["--loglevel", "silly"] + , noreg : ["--no-registry"] + , reg : ["--registry"] + , "no-reg" : ["--no-registry"] + , silent : ["--loglevel", "silent"] + , verbose : ["--loglevel", "verbose"] + , h : ["--usage"] + , H : ["--usage"] + , "?" 
: ["--usage"] + , help : ["--usage"] + , v : ["--version"] + , f : ["--force"] + , desc : ["--description"] + , "no-desc" : ["--no-description"] + , "local" : ["--no-global"] + , l : ["--long"] + , p : ["--parseable"] + , porcelain : ["--parseable"] + , g : ["--global"] + } + + , types = + { aoa: Array + , nullstream: [null, Stream] + , date: Date + , str: String + , browser : String + , cache : path + , color : ["always", Boolean] + , depth : Number + , description : Boolean + , dev : Boolean + , editor : path + , force : Boolean + , global : Boolean + , globalconfig : path + , group : [String, Number] + , gzipbin : String + , logfd : [Number, Stream] + , loglevel : ["silent","win","error","warn","info","verbose","silly"] + , long : Boolean + , "node-version" : [false, String] + , npaturl : url + , npat : Boolean + , "onload-script" : [false, String] + , outfd : [Number, Stream] + , parseable : Boolean + , pre: Boolean + , prefix: path + , proxy : url + , "rebuild-bundle" : Boolean + , registry : url + , searchopts : String + , searchexclude: [null, String] + , shell : path + , t: [Array, String] + , tag : String + , tar : String + , tmp : path + , "unsafe-perm" : Boolean + , usage : Boolean + , user : String + , username : String + , userconfig : path + , version : Boolean + , viewer: path + , _exit : Boolean + } + +; [["-v", {version:true}, []] + ,["---v", {version:true}, []] + ,["ls -s --no-reg connect -d", + {loglevel:"info",registry:null},["ls","connect"]] + ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]] + ,["ls --registry blargle", {}, ["ls"]] + ,["--no-registry", {registry:null}, []] + ,["--no-color true", {color:false}, []] + ,["--no-color false", {color:true}, []] + ,["--no-color", {color:false}, []] + ,["--color false", {color:false}, []] + ,["--color --logfd 7", {logfd:7,color:true}, []] + ,["--color=true", {color:true}, []] + ,["--logfd=10", {logfd:10}, []] + ,["--tmp=/tmp -tar=gtar",{tmp:"/tmp",tar:"gtar"},[]] + ,["--tmp=tmp -tar=gtar", + {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]] + ,["--logfd x", {}, []] + ,["a -true -- -no-false", {true:true},["a","-no-false"]] + ,["a -no-false", {false:false},["a"]] + ,["a -no-no-true", {true:true}, ["a"]] + ,["a -no-no-no-false", {false:false}, ["a"]] + ,["---NO-no-No-no-no-no-nO-no-no"+ + "-No-no-no-no-no-no-no-no-no"+ + "-no-no-no-no-NO-NO-no-no-no-no-no-no"+ + "-no-body-can-do-the-boogaloo-like-I-do" + ,{"body-can-do-the-boogaloo-like-I-do":false}, []] + ,["we are -no-strangers-to-love "+ + "--you-know the-rules --and so-do-i "+ + "---im-thinking-of=a-full-commitment "+ + "--no-you-would-get-this-from-any-other-guy "+ + "--no-gonna-give-you-up "+ + "-no-gonna-let-you-down=true "+ + "--no-no-gonna-run-around false "+ + "--desert-you=false "+ + "--make-you-cry false "+ + "--no-tell-a-lie "+ + "--no-no-and-hurt-you false" + ,{"strangers-to-love":false + ,"you-know":"the-rules" + ,"and":"so-do-i" + ,"you-would-get-this-from-any-other-guy":false + ,"gonna-give-you-up":false + ,"gonna-let-you-down":false + ,"gonna-run-around":false + ,"desert-you":false + ,"make-you-cry":false + ,"tell-a-lie":false + ,"and-hurt-you":false + },["we", "are"]] + ,["-t one -t two -t three" + ,{t: ["one", "two", "three"]} + ,[]] + ,["-t one -t null -t three four five null" + ,{t: ["one", "null", "three"]} + ,["four", "five", "null"]] + ,["-t foo" + ,{t:["foo"]} + ,[]] + ,["--no-t" + ,{t:["false"]} + ,[]] + ,["-no-no-t" + ,{t:["true"]} + ,[]] + ,["-aoa one -aoa null -aoa 100" + ,{aoa:["one", null, 100]} + ,[]] + ,["-str 100" + ,{str:"100"} + ,[]] + 
,["--color always" + ,{color:"always"} + ,[]] + ,["--no-nullstream" + ,{nullstream:null} + ,[]] + ,["--nullstream false" + ,{nullstream:null} + ,[]] + ,["--notadate 2011-01-25" + ,{notadate: "2011-01-25"} + ,[]] + ,["--date 2011-01-25" + ,{date: new Date("2011-01-25")} + ,[]] + ].forEach(function (test) { + var argv = test[0].split(/\s+/) + , opts = test[1] + , rem = test[2] + , actual = nopt(types, shorthands, argv, 0) + , parsed = actual.argv + delete actual.argv + console.log(util.inspect(actual, false, 2, true), parsed.remain) + for (var i in opts) { + var e = JSON.stringify(opts[i]) + , a = JSON.stringify(actual[i] === undefined ? null : actual[i]) + if (e && typeof e === "object") { + assert.deepEqual(e, a) + } else { + assert.equal(e, a) + } + } + assert.deepEqual(rem, parsed.remain) + }) +} diff --git a/node_modules/nopt/package.json b/node_modules/nopt/package.json new file mode 100644 index 00000000..d1118e39 --- /dev/null +++ b/node_modules/nopt/package.json @@ -0,0 +1,12 @@ +{ "name" : "nopt" +, "version" : "1.0.10" +, "description" : "Option parsing for Node, supporting types, shorthands, etc. Used by npm." +, "author" : "Isaac Z. Schlueter (http://blog.izs.me/)" +, "main" : "lib/nopt.js" +, "scripts" : { "test" : "node lib/nopt.js" } +, "repository" : "http://github.com/isaacs/nopt" +, "bin" : "./bin/nopt.js" +, "license" : + { "type" : "MIT" + , "url" : "https://github.com/isaacs/nopt/raw/master/LICENSE" } +, "dependencies" : { "abbrev" : "1" }} diff --git a/node_modules/normalize-path/LICENSE b/node_modules/normalize-path/LICENSE new file mode 100644 index 00000000..d32ab442 --- /dev/null +++ b/node_modules/normalize-path/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2018, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/normalize-path/README.md b/node_modules/normalize-path/README.md new file mode 100644 index 00000000..726d4d68 --- /dev/null +++ b/node_modules/normalize-path/README.md @@ -0,0 +1,127 @@ +# normalize-path [![NPM version](https://img.shields.io/npm/v/normalize-path.svg?style=flat)](https://www.npmjs.com/package/normalize-path) [![NPM monthly downloads](https://img.shields.io/npm/dm/normalize-path.svg?style=flat)](https://npmjs.org/package/normalize-path) [![NPM total downloads](https://img.shields.io/npm/dt/normalize-path.svg?style=flat)](https://npmjs.org/package/normalize-path) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/normalize-path.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/normalize-path) + +> Normalize slashes in a file path to be posix/unix-like forward slashes. Also condenses repeat slashes to a single slash and removes and trailing slashes, unless disabled. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save normalize-path +``` + +## Usage + +```js +const normalize = require('normalize-path'); + +console.log(normalize('\\foo\\bar\\baz\\')); +//=> '/foo/bar/baz' +``` + +**win32 namespaces** + +```js +console.log(normalize('\\\\?\\UNC\\Server01\\user\\docs\\Letter.txt')); +//=> '//?/UNC/Server01/user/docs/Letter.txt' + +console.log(normalize('\\\\.\\CdRomX')); +//=> '//./CdRomX' +``` + +**Consecutive slashes** + +Condenses multiple consecutive forward slashes (except for leading slashes in win32 namespaces) to a single slash. + +```js +console.log(normalize('.//foo//bar///////baz/')); +//=> './foo/bar/baz' +``` + +### Trailing slashes + +By default trailing slashes are removed. Pass `false` as the last argument to disable this behavior and _**keep** trailing slashes_: + +```js +console.log(normalize('foo\\bar\\baz\\', false)); //=> 'foo/bar/baz/' +console.log(normalize('./foo/bar/baz/', false)); //=> './foo/bar/baz/' +``` + +## Release history + +### v3.0 + +No breaking changes in this release. + +* a check was added to ensure that [win32 namespaces](https://msdn.microsoft.com/library/windows/desktop/aa365247(v=vs.85).aspx#namespaces) are handled properly by win32 `path.parse()` after a path has been normalized by this library. +* a minor optimization was made to simplify how the trailing separator was handled + +## About + +
+<summary><strong>Contributing</strong></summary> + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +</details> + +<details> +<summary><strong>Running Tests</strong></summary> + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +</details> + +<details> +<summary><strong>Building docs</strong></summary> + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +</details>
+ +### Related projects + +Other useful path-related libraries: + +* [contains-path](https://www.npmjs.com/package/contains-path): Return true if a file path contains the given path. | [homepage](https://github.com/jonschlinkert/contains-path "Return true if a file path contains the given path.") +* [is-absolute](https://www.npmjs.com/package/is-absolute): Returns true if a file path is absolute. Does not rely on the path module… [more](https://github.com/jonschlinkert/is-absolute) | [homepage](https://github.com/jonschlinkert/is-absolute "Returns true if a file path is absolute. Does not rely on the path module and can be used as a polyfill for node.js native `path.isAbolute`.") +* [is-relative](https://www.npmjs.com/package/is-relative): Returns `true` if the path appears to be relative. | [homepage](https://github.com/jonschlinkert/is-relative "Returns `true` if the path appears to be relative.") +* [parse-filepath](https://www.npmjs.com/package/parse-filepath): Pollyfill for node.js `path.parse`, parses a filepath into an object. | [homepage](https://github.com/jonschlinkert/parse-filepath "Pollyfill for node.js `path.parse`, parses a filepath into an object.") +* [path-ends-with](https://www.npmjs.com/package/path-ends-with): Return `true` if a file path ends with the given string/suffix. | [homepage](https://github.com/jonschlinkert/path-ends-with "Return `true` if a file path ends with the given string/suffix.") +* [unixify](https://www.npmjs.com/package/unixify): Convert Windows file paths to unix paths. | [homepage](https://github.com/jonschlinkert/unixify "Convert Windows file paths to unix paths.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 35 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [phated](https://github.com/phated) | + +### Author + +**Jon Schlinkert** + +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on April 19, 2018._ \ No newline at end of file diff --git a/node_modules/normalize-path/index.js b/node_modules/normalize-path/index.js new file mode 100644 index 00000000..6fac553a --- /dev/null +++ b/node_modules/normalize-path/index.js @@ -0,0 +1,35 @@ +/*! + * normalize-path + * + * Copyright (c) 2014-2018, Jon Schlinkert. + * Released under the MIT License. + */ + +module.exports = function(path, stripTrailing) { + if (typeof path !== 'string') { + throw new TypeError('expected path to be a string'); + } + + if (path === '\\' || path === '/') return '/'; + + var len = path.length; + if (len <= 1) return path; + + // ensure that win32 namespaces has two leading slashes, so that the path is + // handled properly by the win32 version of path.parse() after being normalized + // https://msdn.microsoft.com/library/windows/desktop/aa365247(v=vs.85).aspx#namespaces + var prefix = ''; + if (len > 4 && path[3] === '\\') { + var ch = path[2]; + if ((ch === '?' 
|| ch === '.') && path.slice(0, 2) === '\\\\') { + path = path.slice(2); + prefix = '//'; + } + } + + var segs = path.split(/[/\\]+/); + if (stripTrailing !== false && segs[segs.length - 1] === '') { + segs.pop(); + } + return prefix + segs.join('/'); +}; diff --git a/node_modules/normalize-path/package.json b/node_modules/normalize-path/package.json new file mode 100644 index 00000000..ad61098a --- /dev/null +++ b/node_modules/normalize-path/package.json @@ -0,0 +1,77 @@ +{ + "name": "normalize-path", + "description": "Normalize slashes in a file path to be posix/unix-like forward slashes. Also condenses repeat slashes to a single slash and removes and trailing slashes, unless disabled.", + "version": "3.0.0", + "homepage": "https://github.com/jonschlinkert/normalize-path", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Blaine Bublitz (https://twitter.com/BlaineBublitz)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "jonschlinkert/normalize-path", + "bugs": { + "url": "https://github.com/jonschlinkert/normalize-path/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "minimist": "^1.2.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "absolute", + "backslash", + "delimiter", + "file", + "file-path", + "filepath", + "fix", + "forward", + "fp", + "fs", + "normalize", + "path", + "relative", + "separator", + "slash", + "slashes", + "trailing", + "unix", + "urix" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "description": "Other useful path-related libraries:", + "list": [ + "contains-path", + "is-absolute", + "is-relative", + "parse-filepath", + "path-ends-with", + "path-ends-with", + "unixify" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/npm-bundled/LICENSE b/node_modules/npm-bundled/LICENSE new file mode 100644 index 00000000..20a47625 --- /dev/null +++ b/node_modules/npm-bundled/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-bundled/README.md b/node_modules/npm-bundled/README.md new file mode 100644 index 00000000..fcfb2322 --- /dev/null +++ b/node_modules/npm-bundled/README.md @@ -0,0 +1,48 @@ +# npm-bundled + +Run this in a node package, and it'll tell you which things in +node_modules are bundledDependencies, or transitive dependencies of +bundled dependencies. 
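Concretely, "bundled" here means whatever package names appear in `bundleDependencies` (or the older `bundledDependencies` spelling, both of which the walker in the index.js below accepts), plus everything those packages pull in. An illustrative package.json shape, with made-up names:

```js
// Illustrative only: the package and dependency names are made up.
// With this manifest, npm-bundled would report "left-pad" and every
// package that "left-pad" itself depends on, but not "lodash".
const manifest = {
  name: 'my-app',
  version: '1.0.0',
  dependencies: {
    'left-pad': '^1.3.0',
    'lodash': '^4.17.0'
  },
  // The walker accepts either spelling of this field.
  bundleDependencies: ['left-pad']
}
```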
+ +[![Build Status](https://travis-ci.org/npm/npm-bundled.svg?branch=master)](https://travis-ci.org/npm/npm-bundled) + +## USAGE + +To get the list of deps at the top level that are bundled (or +transitive deps of a bundled dep) run this: + +```js +const bundled = require('npm-bundled') + +// async version +bundled({ path: '/path/to/pkg/defaults/to/cwd'}, (er, list) => { + // er means it had an error, which is _hella_ weird + // list is a list of package names, like `fooblz` or `@corp/blerg` + // the might not all be deps of the top level, because transitives +}) + +// async promise version +bundled({ path: '/path/to/pkg/defaults/to/cwd'}).then(list => { + // so promisey! + // actually the callback version returns a promise, too, it just + // attaches the supplied callback to the promise +}) + +// sync version, throws if there's an error +const list = bundled({ path: '/path/to/pkg/defaults/to/cwd'}) +``` + +That's basically all you need to know. If you care to dig into it, +you can also use the `bundled.Walker` and `bundled.WalkerSync` +classes to get fancy. + +This library does not write anything to the filesystem, but it _may_ +have undefined behavior if the structure of `node_modules` changes +while it's reading deps. + +All symlinks are followed. This means that it can lead to surprising +results if a symlinked bundled dependency has a missing dependency +that is satisfied at the top level. Since package creation resolves +symlinks as well, this is an edge case where package creation and +development environment are not going to be aligned, and is best +avoided. diff --git a/node_modules/npm-bundled/index.js b/node_modules/npm-bundled/index.js new file mode 100644 index 00000000..bde0acd1 --- /dev/null +++ b/node_modules/npm-bundled/index.js @@ -0,0 +1,241 @@ +'use strict' + +// walk the tree of deps starting from the top level list of bundled deps +// Any deps at the top level that are depended on by a bundled dep that +// does not have that dep in its own node_modules folder are considered +// bundled deps as well. This list of names can be passed to npm-packlist +// as the "bundled" argument. Additionally, packageJsonCache is shared so +// packlist doesn't have to re-read files already consumed in this pass + +const fs = require('fs') +const path = require('path') +const EE = require('events').EventEmitter + +class BundleWalker extends EE { + constructor (opt) { + opt = opt || {} + super(opt) + this.path = path.resolve(opt.path || process.cwd()) + + this.parent = opt.parent || null + if (this.parent) { + this.result = this.parent.result + // only collect results in node_modules folders at the top level + // since the node_modules in a bundled dep is included always + if (!this.parent.parent) { + const base = path.basename(this.path) + const scope = path.basename(path.dirname(this.path)) + this.result.add(/^@/.test(scope) ? 
scope + '/' + base : base) + } + this.root = this.parent.root + this.packageJsonCache = this.parent.packageJsonCache + } else { + this.result = new Set() + this.root = this.path + this.packageJsonCache = opt.packageJsonCache || new Map() + } + + this.seen = new Set() + this.didDone = false + this.children = 0 + this.node_modules = [] + this.package = null + this.bundle = null + } + + addListener (ev, fn) { + return this.on(ev, fn) + } + + on (ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'done' && this.didDone) { + this.emit('done', this.result) + } + return ret + } + + done () { + if (!this.didDone) { + this.didDone = true + if (!this.parent) { + const res = Array.from(this.result) + this.result = res + this.emit('done', res) + } else { + this.emit('done') + } + } + } + + start () { + const pj = path.resolve(this.path, 'package.json') + if (this.packageJsonCache.has(pj)) + this.onPackage(this.packageJsonCache.get(pj)) + else + this.readPackageJson(pj) + return this + } + + readPackageJson (pj) { + fs.readFile(pj, (er, data) => + er ? this.done() : this.onPackageJson(pj, data)) + } + + onPackageJson (pj, data) { + try { + this.package = JSON.parse(data + '') + } catch (er) { + return this.done() + } + this.packageJsonCache.set(pj, this.package) + this.onPackage(this.package) + } + + onPackage (pkg) { + // all deps are bundled if we got here as a child. + // otherwise, only bundle bundledDeps + // Get a unique-ified array with a short-lived Set + const bdRaw = this.parent + ? Object.keys(pkg.dependencies || {}).concat( + Object.keys(pkg.optionalDependencies || {})) + : pkg.bundleDependencies || pkg.bundledDependencies || [] + + const bd = Array.from(new Set( + Array.isArray(bdRaw) ? bdRaw : Object.keys(bdRaw))) + + if (!bd.length) + return this.done() + + this.bundle = bd + const nm = this.path + '/node_modules' + this.readModules() + } + + readModules () { + readdirNodeModules(this.path + '/node_modules', (er, nm) => + er ? 
this.onReaddir([]) : this.onReaddir(nm)) + } + + onReaddir (nm) { + // keep track of what we have, in case children need it + this.node_modules = nm + + this.bundle.forEach(dep => this.childDep(dep)) + if (this.children === 0) + this.done() + } + + childDep (dep) { + if (this.node_modules.indexOf(dep) !== -1 && !this.seen.has(dep)) { + this.seen.add(dep) + this.child(dep) + } else if (this.parent) { + this.parent.childDep(dep) + } + } + + child (dep) { + const p = this.path + '/node_modules/' + dep + this.children += 1 + const child = new BundleWalker({ + path: p, + parent: this + }) + child.on('done', _ => { + if (--this.children === 0) + this.done() + }) + child.start() + } +} + +class BundleWalkerSync extends BundleWalker { + constructor (opt) { + super(opt) + } + + start () { + super.start() + this.done() + return this + } + + readPackageJson (pj) { + try { + this.onPackageJson(pj, fs.readFileSync(pj)) + } catch (er) {} + return this + } + + readModules () { + try { + this.onReaddir(readdirNodeModulesSync(this.path + '/node_modules')) + } catch (er) { + this.onReaddir([]) + } + } + + child (dep) { + new BundleWalkerSync({ + path: this.path + '/node_modules/' + dep, + parent: this + }).start() + } +} + +const readdirNodeModules = (nm, cb) => { + fs.readdir(nm, (er, set) => { + if (er) + cb(er) + else { + const scopes = set.filter(f => /^@/.test(f)) + if (!scopes.length) + cb(null, set) + else { + const unscoped = set.filter(f => !/^@/.test(f)) + let count = scopes.length + scopes.forEach(scope => { + fs.readdir(nm + '/' + scope, (er, pkgs) => { + if (er || !pkgs.length) + unscoped.push(scope) + else + unscoped.push.apply(unscoped, pkgs.map(p => scope + '/' + p)) + if (--count === 0) + cb(null, unscoped) + }) + }) + } + } + }) +} + +const readdirNodeModulesSync = nm => { + const set = fs.readdirSync(nm) + const unscoped = set.filter(f => !/^@/.test(f)) + const scopes = set.filter(f => /^@/.test(f)).map(scope => { + try { + const pkgs = fs.readdirSync(nm + '/' + scope) + return pkgs.length ? pkgs.map(p => scope + '/' + p) : [scope] + } catch (er) { + return [scope] + } + }).reduce((a, b) => a.concat(b), []) + return unscoped.concat(scopes) +} + +const walk = (options, callback) => { + const p = new Promise((resolve, reject) => { + new BundleWalker(options).on('done', resolve).on('error', reject).start() + }) + return callback ? p.then(res => callback(null, res), callback) : p +} + +const walkSync = options => { + return new BundleWalkerSync(options).start().result +} + +module.exports = walk +walk.sync = walkSync +walk.BundleWalker = BundleWalker +walk.BundleWalkerSync = BundleWalkerSync diff --git a/node_modules/npm-bundled/package.json b/node_modules/npm-bundled/package.json new file mode 100644 index 00000000..10b04828 --- /dev/null +++ b/node_modules/npm-bundled/package.json @@ -0,0 +1,27 @@ +{ + "name": "npm-bundled", + "version": "1.0.6", + "description": "list things in node_modules that are bundledDependencies, or transitive dependencies thereof", + "main": "index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/npm-bundled.git" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "devDependencies": { + "mkdirp": "^0.5.1", + "mutate-fs": "^1.1.0", + "rimraf": "^2.6.1", + "tap": "^12.0.1" + }, + "scripts": { + "test": "tap test/*.js -J --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "files": [ + "index.js" + ] +} diff --git a/node_modules/npm-packlist/LICENSE b/node_modules/npm-packlist/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/npm-packlist/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-packlist/README.md b/node_modules/npm-packlist/README.md new file mode 100644 index 00000000..ead5821e --- /dev/null +++ b/node_modules/npm-packlist/README.md @@ -0,0 +1,68 @@ +# npm-packlist + +[![Build Status](https://travis-ci.com/npm/npm-packlist.svg?token=hHeDp9pQmz9kvsgRNVHy&branch=master)](https://travis-ci.com/npm/npm-packlist) + +Get a list of the files to add from a folder into an npm package + +These can be handed to [tar](http://npm.im/tar) like so to make an npm +package tarball: + +```js +const packlist = require('npm-packlist') +const tar = require('tar') +const packageDir = '/path/to/package' +const packageTarball = '/path/to/package.tgz' + +packlist({ path: packageDir }) + .then(files => tar.create({ + prefix: 'package/', + cwd: packageDir, + file: packageTarball, + gzip: true + }, files)) + .then(_ => { + // tarball has been created, continue with your day + }) +``` + +This uses the following rules: + +1. If a `package.json` file is found, and it has a `files` list, + then ignore everything that isn't in `files`. Always include the + readme, license, notice, changes, changelog, and history files, if + they exist, and the package.json file itself. +2. If there's no `package.json` file (or it has no `files` list), and + there is a `.npmignore` file, then ignore all the files in the + `.npmignore` file. +3. If there's no `package.json` with a `files` list, and there's no + `.npmignore` file, but there is a `.gitignore` file, then ignore + all the files in the `.gitignore` file. +4. Everything in the root `node_modules` is ignored, unless it's a + bundled dependency. If it IS a bundled dependency, and it's a + symbolic link, then the target of the link is included, not the + symlink itself. +4. Unless they're explicitly included (by being in a `files` list, or + a `!negated` rule in a relevant `.npmignore` or `.gitignore`), + always ignore certain common cruft files: + + 1. .npmignore and .gitignore files (their effect is in the package + already, there's no need to include them in the package) + 2. editor junk like `.*.swp`, `._*` and `.*.orig` files + 3. 
`.npmrc` files (these may contain private configs) + 4. The `node_modules/.bin` folder + 5. Waf and gyp cruft like `/build/config.gypi` and `.lock-wscript` + 6. Darwin's `.DS_Store` files because wtf are those even + 7. `npm-debug.log` files at the root of a project + + You can explicitly re-include any of these with a `files` list in + `package.json` or a negated ignore file rule. + +## API + +Same API as [ignore-walk](http://npm.im/ignore-walk), just hard-coded +file list and rule sets. + +The `Walker` and `WalkerSync` classes take a `bundled` argument, which +is a list of package names to include from node_modules. When calling +the top-level `packlist()` and `packlist.sync()` functions, this +module calls into `npm-bundled` directly. diff --git a/node_modules/npm-packlist/index.js b/node_modules/npm-packlist/index.js new file mode 100644 index 00000000..8bfd2577 --- /dev/null +++ b/node_modules/npm-packlist/index.js @@ -0,0 +1,274 @@ +'use strict' + +// Do a two-pass walk, first to get the list of packages that need to be +// bundled, then again to get the actual files and folders. +// Keep a cache of node_modules content and package.json data, so that the +// second walk doesn't have to re-do all the same work. + +const bundleWalk = require('npm-bundled') +const BundleWalker = bundleWalk.BundleWalker +const BundleWalkerSync = bundleWalk.BundleWalkerSync + +const ignoreWalk = require('ignore-walk') +const IgnoreWalker = ignoreWalk.Walker +const IgnoreWalkerSync = ignoreWalk.WalkerSync + +const rootBuiltinRules = Symbol('root-builtin-rules') +const packageNecessaryRules = Symbol('package-necessary-rules') +const path = require('path') + +const defaultRules = [ + '.npmignore', + '.gitignore', + '**/.git', + '**/.svn', + '**/.hg', + '**/CVS', + '**/.git/**', + '**/.svn/**', + '**/.hg/**', + '**/CVS/**', + '/.lock-wscript', + '/.wafpickle-*', + '/build/config.gypi', + 'npm-debug.log', + '**/.npmrc', + '.*.swp', + '.DS_Store', + '**/.DS_Store/**', + '._*', + '**/._*/**', + '*.orig', + '/package-lock.json', + '/yarn.lock', + 'archived-packages/**', + 'core', + '!core/', + '!**/core/', + '*.core', + '*.vgcore', + 'vgcore.*', + 'core.+([0-9])', +] + +// a decorator that applies our custom rules to an ignore walker +const npmWalker = Class => class Walker extends Class { + constructor (opt) { + opt = opt || {} + + // the order in which rules are applied. + opt.ignoreFiles = [ + rootBuiltinRules, + 'package.json', + '.npmignore', + '.gitignore', + packageNecessaryRules + ] + + opt.includeEmpty = false + opt.path = opt.path || process.cwd() + const dirName = path.basename(opt.path) + const parentName = path.basename(path.dirname(opt.path)) + opt.follow = + dirName === 'node_modules' || + (parentName === 'node_modules' && /^@/.test(dirName)) + super(opt) + + // ignore a bunch of things by default at the root level. 
+ // also ignore anything in node_modules, except bundled dependencies + if (!this.parent) { + this.bundled = opt.bundled || [] + this.bundledScopes = Array.from(new Set( + this.bundled.filter(f => /^@/.test(f)) + .map(f => f.split('/')[0]))) + const rules = defaultRules.join('\n') + '\n' + this.packageJsonCache = opt.packageJsonCache || new Map() + super.onReadIgnoreFile(rootBuiltinRules, rules, _=>_) + } else { + this.bundled = [] + this.bundledScopes = [] + this.packageJsonCache = this.parent.packageJsonCache + } + } + + onReaddir (entries) { + if (!this.parent) { + entries = entries.filter(e => + e !== '.git' && + !(e === 'node_modules' && this.bundled.length === 0) + ) + } + return super.onReaddir(entries) + } + + filterEntry (entry, partial) { + // get the partial path from the root of the walk + const p = this.path.substr(this.root.length + 1) + const pkgre = /^node_modules\/(@[^\/]+\/?[^\/]+|[^\/]+)(\/.*)?$/ + const isRoot = !this.parent + const pkg = isRoot && pkgre.test(entry) ? + entry.replace(pkgre, '$1') : null + const rootNM = isRoot && entry === 'node_modules' + const rootPJ = isRoot && entry === 'package.json' + + return ( + // if we're in a bundled package, check with the parent. + /^node_modules($|\/)/i.test(p) ? this.parent.filterEntry( + this.basename + '/' + entry, partial) + + // if package is bundled, all files included + // also include @scope dirs for bundled scoped deps + // they'll be ignored if no files end up in them. + // However, this only matters if we're in the root. + // node_modules folders elsewhere, like lib/node_modules, + // should be included normally unless ignored. + : pkg ? -1 !== this.bundled.indexOf(pkg) || + -1 !== this.bundledScopes.indexOf(pkg) + + // only walk top node_modules if we want to bundle something + : rootNM ? !!this.bundled.length + + // always include package.json at the root. + : rootPJ ? true + + // otherwise, follow ignore-walk's logic + : super.filterEntry(entry, partial) + ) + } + + filterEntries () { + if (this.ignoreRules['package.json']) + this.ignoreRules['.gitignore'] = this.ignoreRules['.npmignore'] = null + else if (this.ignoreRules['.npmignore']) + this.ignoreRules['.gitignore'] = null + this.filterEntries = super.filterEntries + super.filterEntries() + } + + addIgnoreFile (file, then) { + const ig = path.resolve(this.path, file) + if (this.packageJsonCache.has(ig)) + this.onPackageJson(ig, this.packageJsonCache.get(ig), then) + else + super.addIgnoreFile(file, then) + } + + onPackageJson (ig, pkg, then) { + this.packageJsonCache.set(ig, pkg) + + // if there's a bin, browser or main, make sure we don't ignore it + // also, don't ignore the package.json itself! + // + // Weird side-effect of this: a readme (etc) file will be included + // if it exists anywhere within a folder with a package.json file. + // The original intent was only to include these files in the root, + // but now users in the wild are dependent on that behavior for + // localized documentation and other use cases. Adding a `/` to + // these rules, while tempting and arguably more "correct", is a + // breaking change. + const rules = [ + pkg.browser ? '!' + pkg.browser : '', + pkg.main ? '!' + pkg.main : '', + '!package.json', + '!@(readme|copying|license|licence|notice|changes|changelog|history){,.*[^~$]}' + ] + if (pkg.bin) + if (typeof pkg.bin === "object") + for (const key in pkg.bin) + rules.push('!' + pkg.bin[key]) + else + rules.push('!' 
+ pkg.bin) + + const data = rules.filter(f => f).join('\n') + '\n' + super.onReadIgnoreFile(packageNecessaryRules, data, _=>_) + + if (Array.isArray(pkg.files)) + super.onReadIgnoreFile('package.json', '*\n' + pkg.files.map( + f => '!' + f + '\n!' + f.replace(/\/+$/, '') + '/**' + ).join('\n') + '\n', then) + else + then() + } + + // override parent onstat function to nix all symlinks + onstat (st, entry, file, dir, then) { + if (st.isSymbolicLink()) + then() + else + super.onstat(st, entry, file, dir, then) + } + + onReadIgnoreFile (file, data, then) { + if (file === 'package.json') + try { + const ig = path.resolve(this.path, file) + this.onPackageJson(ig, JSON.parse(data), then) + } catch (er) { + // ignore package.json files that are not json + then() + } + else + super.onReadIgnoreFile(file, data, then) + } + + sort (a, b) { + return sort(a, b) + } +} + +class Walker extends npmWalker(IgnoreWalker) { + walker (entry, then) { + new Walker(this.walkerOpt(entry)).on('done', then).start() + } +} + +class WalkerSync extends npmWalker(IgnoreWalkerSync) { + walker (entry, then) { + new WalkerSync(this.walkerOpt(entry)).start() + then() + } +} + +const walk = (options, callback) => { + options = options || {} + const p = new Promise((resolve, reject) => { + const bw = new BundleWalker(options) + bw.on('done', bundled => { + options.bundled = bundled + options.packageJsonCache = bw.packageJsonCache + new Walker(options).on('done', resolve).on('error', reject).start() + }) + bw.start() + }) + return callback ? p.then(res => callback(null, res), callback) : p +} + +const walkSync = options => { + options = options || {} + const bw = new BundleWalkerSync(options).start() + options.bundled = bw.result + options.packageJsonCache = bw.packageJsonCache + const walker = new WalkerSync(options) + walker.start() + return walker.result +} + +// optimize for compressibility +// extname, then basename, then locale alphabetically +// https://twitter.com/isntitvacant/status/1131094910923231232 +const sort = (a, b) => { + const exta = path.extname(a).toLowerCase() + const extb = path.extname(b).toLowerCase() + const basea = path.basename(a).toLowerCase() + const baseb = path.basename(b).toLowerCase() + + return exta.localeCompare(extb) || + basea.localeCompare(baseb) || + a.localeCompare(b) +} + + +module.exports = walk +walk.sync = walkSync +walk.Walker = Walker +walk.WalkerSync = WalkerSync diff --git a/node_modules/npm-packlist/package.json b/node_modules/npm-packlist/package.json new file mode 100644 index 00000000..3fcff4d7 --- /dev/null +++ b/node_modules/npm-packlist/package.json @@ -0,0 +1,38 @@ +{ + "name": "npm-packlist", + "version": "1.4.4", + "description": "Get a list of the files to add from a folder into an npm package", + "directories": { + "test": "test" + }, + "main": "index.js", + "dependencies": { + "ignore-walk": "^3.0.1", + "npm-bundled": "^1.0.1" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "index.js" + ], + "devDependencies": { + "mkdirp": "^0.5.1", + "rimraf": "^2.6.1", + "tap": "^14.2.1" + }, + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/npm-packlist.git" + }, + "bugs": { + "url": "https://github.com/npm/npm-packlist/issues" + }, + "homepage": "https://www.npmjs.com/package/npm-packlist" +} diff --git a/node_modules/npm-run-path/index.js b/node_modules/npm-run-path/index.js new file mode 100644 index 00000000..56f31e47 --- /dev/null +++ b/node_modules/npm-run-path/index.js @@ -0,0 +1,39 @@ +'use strict'; +const path = require('path'); +const pathKey = require('path-key'); + +module.exports = opts => { + opts = Object.assign({ + cwd: process.cwd(), + path: process.env[pathKey()] + }, opts); + + let prev; + let pth = path.resolve(opts.cwd); + const ret = []; + + while (prev !== pth) { + ret.push(path.join(pth, 'node_modules/.bin')); + prev = pth; + pth = path.resolve(pth, '..'); + } + + // ensure the running `node` binary is used + ret.push(path.dirname(process.execPath)); + + return ret.concat(opts.path).join(path.delimiter); +}; + +module.exports.env = opts => { + opts = Object.assign({ + env: process.env + }, opts); + + const env = Object.assign({}, opts.env); + const path = pathKey({env}); + + opts.path = env[path]; + env[path] = module.exports(opts); + + return env; +}; diff --git a/node_modules/npm-run-path/license b/node_modules/npm-run-path/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/npm-run-path/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
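The npm-run-path `index.js` above builds the augmented PATH by walking from `cwd` up to the filesystem root, collecting each `node_modules/.bin`, then appending the directory of the running `node` binary and finally the original PATH. A short sketch of how the two exports are typically wired into `child_process` (the `eslint` binary name is just an example of a locally installed CLI):

```js
// Sketch: "eslint" stands in for any locally installed binary.
const {spawn} = require('child_process');
const npmRunPath = require('npm-run-path');

// npmRunPath() returns the PATH string itself...
console.log(npmRunPath({cwd: __dirname}));
//=> every ancestor node_modules/.bin, then the node binary's dir, then the original PATH

// ...while npmRunPath.env() returns a copy of the environment with that
// PATH written under the platform-correct key (PATH vs Path on Windows).
const child = spawn('eslint', ['.'], {env: npmRunPath.env()});
child.on('exit', code => { process.exitCode = code; });
```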
diff --git a/node_modules/npm-run-path/package.json b/node_modules/npm-run-path/package.json new file mode 100644 index 00000000..3c27504c --- /dev/null +++ b/node_modules/npm-run-path/package.json @@ -0,0 +1,45 @@ +{ + "name": "npm-run-path", + "version": "2.0.2", + "description": "Get your PATH prepended with locally installed binaries", + "license": "MIT", + "repository": "sindresorhus/npm-run-path", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "npm", + "run", + "path", + "package", + "bin", + "binary", + "binaries", + "script", + "cli", + "command-line", + "execute", + "executable" + ], + "dependencies": { + "path-key": "^2.0.0" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + }, + "xo": { + "esnext": true + } +} diff --git a/node_modules/npm-run-path/readme.md b/node_modules/npm-run-path/readme.md new file mode 100644 index 00000000..4ff4722a --- /dev/null +++ b/node_modules/npm-run-path/readme.md @@ -0,0 +1,81 @@ +# npm-run-path [![Build Status](https://travis-ci.org/sindresorhus/npm-run-path.svg?branch=master)](https://travis-ci.org/sindresorhus/npm-run-path) + +> Get your [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) prepended with locally installed binaries + +In [npm run scripts](https://docs.npmjs.com/cli/run-script) you can execute locally installed binaries by name. This enables the same outside npm. + + +## Install + +``` +$ npm install --save npm-run-path +``` + + +## Usage + +```js +const childProcess = require('child_process'); +const npmRunPath = require('npm-run-path'); + +console.log(process.env.PATH); +//=> '/usr/local/bin' + +console.log(npmRunPath()); +//=> '/Users/sindresorhus/dev/foo/node_modules/.bin:/Users/sindresorhus/dev/node_modules/.bin:/Users/sindresorhus/node_modules/.bin:/Users/node_modules/.bin:/node_modules/.bin:/usr/local/bin' + +// `foo` is a locally installed binary +childProcess.execFileSync('foo', { + env: npmRunPath.env() +}); +``` + + +## API + +### npmRunPath([options]) + +#### options + +##### cwd + +Type: `string`
+Default: `process.cwd()` + +Working directory. + +##### path + +Type: `string`
+Default: [`PATH`](https://github.com/sindresorhus/path-key) + +PATH to be appended.
+Set it to an empty string to exclude the default PATH. + +### npmRunPath.env([options]) + +#### options + +##### cwd + +Type: `string`
+Default: `process.cwd()` + +Working directory. + +##### env + +Type: `Object` + +Accepts an object of environment variables, like `process.env`, and modifies the PATH using the correct [PATH key](https://github.com/sindresorhus/path-key). Use this if you're modifying the PATH for use in the `child_process` options. + + +## Related + +- [npm-run-path-cli](https://github.com/sindresorhus/npm-run-path-cli) - CLI for this module +- [execa](https://github.com/sindresorhus/execa) - Execute a locally installed binary + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/npmlog/CHANGELOG.md b/node_modules/npmlog/CHANGELOG.md new file mode 100644 index 00000000..51e4abc0 --- /dev/null +++ b/node_modules/npmlog/CHANGELOG.md @@ -0,0 +1,49 @@ +### v4.0.2 + +* Added installation instructions. + +### v4.0.1 + +* Fix bugs where `log.progressEnabled` got out of sync with how `gauge` kept + track of these things resulting in a progressbar that couldn't be disabled. + +### v4.0.0 + +* Allow creating log levels that are an empty string or 0. + +### v3.1.2 + +* Update to `gauge@1.6.0` adding support for default values for template + items. + +### v3.1.1 + +* Update to `gauge@1.5.3` to fix to `1.x` compatibility when it comes to + when a progress bar is enabled. In `1.x` if you didn't have a TTY the + progress bar was never shown. In `2.x` it merely defaults to disabled, + but you can enable it explicitly if you still want progress updates. + +### v3.1.0 + +* Update to `gauge@2.5.2`: + * Updates the `signal-exit` dependency which fixes an incompatibility with + the node profiler. + * Uses externalizes its ansi code generation in `console-control-strings` +* Make the default progress bar include the last line printed, colored as it + would be when printing to a tty. + +### v3.0.0 + +* Switch to `gauge@2.0.0`, for better performance, better look. +* Set stderr/stdout blocking if they're tty's, so that we can hide a + progress bar going to stderr and then safely print to stdout. Without + this the two can end up overlapping producing confusing and sometimes + corrupted output. + +### v2.0.0 + +* Make the `error` event non-fatal so that folks can use it as a prefix. + +### v1.0.0 + +* Add progress bar with `gauge@1.1.0` diff --git a/node_modules/npmlog/LICENSE b/node_modules/npmlog/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/npmlog/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npmlog/README.md b/node_modules/npmlog/README.md new file mode 100644 index 00000000..268a4af4 --- /dev/null +++ b/node_modules/npmlog/README.md @@ -0,0 +1,216 @@ +# npmlog + +The logger util that npm uses. + +This logger is very basic. It does the logging for npm. 
It supports +custom levels and colored output. + +By default, logs are written to stderr. If you want to send log messages +to outputs other than streams, then you can change the `log.stream` +member, or you can just listen to the events that it emits, and do +whatever you want with them. + +# Installation + +```console +npm install npmlog --save +``` + +# Basic Usage + +```javascript +var log = require('npmlog') + +// additional stuff ---------------------------+ +// message ----------+ | +// prefix ----+ | | +// level -+ | | | +// v v v v + log.info('fyi', 'I have a kitty cat: %j', myKittyCat) +``` + +## log.level + +* {String} + +The level to display logs at. Any logs at or above this level will be +displayed. The special level `silent` will prevent anything from being +displayed ever. + +## log.record + +* {Array} + +An array of all the log messages that have been entered. + +## log.maxRecordSize + +* {Number} + +The maximum number of records to keep. If log.record gets bigger than +10% over this value, then it is sliced down to 90% of this value. + +The reason for the 10% window is so that it doesn't have to resize a +large array on every log entry. + +## log.prefixStyle + +* {Object} + +A style object that specifies how prefixes are styled. (See below) + +## log.headingStyle + +* {Object} + +A style object that specifies how the heading is styled. (See below) + +## log.heading + +* {String} Default: "" + +If set, a heading that is printed at the start of every line. + +## log.stream + +* {Stream} Default: `process.stderr` + +The stream where output is written. + +## log.enableColor() + +Force colors to be used on all messages, regardless of the output +stream. + +## log.disableColor() + +Disable colors on all messages. + +## log.enableProgress() + +Enable the display of log activity spinner and progress bar + +## log.disableProgress() + +Disable the display of a progress bar + +## log.enableUnicode() + +Force the unicode theme to be used for the progress bar. + +## log.disableUnicode() + +Disable the use of unicode in the progress bar. + +## log.setGaugeTemplate(template) + +Set a template for outputting the progress bar. See the [gauge documentation] for details. + +[gauge documentation]: https://npmjs.com/package/gauge + +## log.setGaugeThemeset(themes) + +Select a themeset to pick themes from for the progress bar. See the [gauge documentation] for details. + +## log.pause() + +Stop emitting messages to the stream, but do not drop them. + +## log.resume() + +Emit all buffered messages that were written while paused. + +## log.log(level, prefix, message, ...) + +* `level` {String} The level to emit the message at +* `prefix` {String} A string prefix. Set to "" to skip. +* `message...` Arguments to `util.format` + +Emit a log message at the specified level. + +## log\[level](prefix, message, ...) + +For example, + +* log.silly(prefix, message, ...) +* log.verbose(prefix, message, ...) +* log.info(prefix, message, ...) +* log.http(prefix, message, ...) +* log.warn(prefix, message, ...) +* log.error(prefix, message, ...) + +Like `log.log(level, prefix, message, ...)`. In this way, each level is +given a shorthand, so you can do `log.info(prefix, message)`. + +## log.addLevel(level, n, style, disp) + +* `level` {String} Level indicator +* `n` {Number} The numeric level +* `style` {Object} Object with fg, bg, inverse, etc. +* `disp` {String} Optional replacement for `level` in the output. + +Sets up a new level with a shorthand function and so forth. 
+ +Note that if the number is `Infinity`, then setting the level to that +will cause all log messages to be suppressed. If the number is +`-Infinity`, then the only way to show it is to enable all log messages. + +## log.newItem(name, todo, weight) + +* `name` {String} Optional; progress item name. +* `todo` {Number} Optional; total amount of work to be done. Default 0. +* `weight` {Number} Optional; the weight of this item relative to others. Default 1. + +This adds a new `are-we-there-yet` item tracker to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `Tracker` object. + +## log.newStream(name, todo, weight) + +This adds a new `are-we-there-yet` stream tracker to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `TrackerStream` object. + +## log.newGroup(name, weight) + +This adds a new `are-we-there-yet` tracker group to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `TrackerGroup` object. + +# Events + +Events are all emitted with the message object. + +* `log` Emitted for all messages +* `log.` Emitted for all messages with the `` level. +* `` Messages with prefixes also emit their prefix as an event. + +# Style Objects + +Style objects can have the following fields: + +* `fg` {String} Color for the foreground text +* `bg` {String} Color for the background +* `bold`, `inverse`, `underline` {Boolean} Set the associated property +* `bell` {Boolean} Make a noise (This is pretty annoying, probably.) + +# Message Objects + +Every log event is emitted with a message object, and the `log.record` +list contains all of them that have been created. They have the +following fields: + +* `id` {Number} +* `level` {String} +* `prefix` {String} +* `message` {String} Result of `util.format()` +* `messageRaw` {Array} Arguments to `util.format()` + +# Blocking TTYs + +We use [`set-blocking`](https://npmjs.com/package/set-blocking) to set +stderr and stdout blocking if they are tty's and have the setBlocking call. +This is a work around for an issue in early versions of Node.js 6.x, which +made stderr and stdout non-blocking on OSX. (They are always blocking +Windows and were never blocking on Linux.) `npmlog` needs them to be blocking +so that it can allow output to stdout and stderr to be interlaced. diff --git a/node_modules/npmlog/log.js b/node_modules/npmlog/log.js new file mode 100644 index 00000000..341f3313 --- /dev/null +++ b/node_modules/npmlog/log.js @@ -0,0 +1,309 @@ +'use strict' +var Progress = require('are-we-there-yet') +var Gauge = require('gauge') +var EE = require('events').EventEmitter +var log = exports = module.exports = new EE() +var util = require('util') + +var setBlocking = require('set-blocking') +var consoleControl = require('console-control-strings') + +setBlocking(true) +var stream = process.stderr +Object.defineProperty(log, 'stream', { + set: function (newStream) { + stream = newStream + if (this.gauge) this.gauge.setWriteTo(stream, stream) + }, + get: function () { + return stream + } +}) + +// by default, decide based on tty-ness. +var colorEnabled +log.useColor = function () { + return colorEnabled != null ? 
colorEnabled : stream.isTTY +} + +log.enableColor = function () { + colorEnabled = true + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} +log.disableColor = function () { + colorEnabled = false + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} + +// default level +log.level = 'info' + +log.gauge = new Gauge(stream, { + enabled: false, // no progress bars unless asked + theme: {hasColor: log.useColor()}, + template: [ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', default: ''}, + ':', + {type: 'logline', kerning: 1, default: ''} + ] +}) + +log.tracker = new Progress.TrackerGroup() + +// we track this separately as we may need to temporarily disable the +// display of the status bar for our own loggy purposes. +log.progressEnabled = log.gauge.isEnabled() + +var unicodeEnabled + +log.enableUnicode = function () { + unicodeEnabled = true + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.disableUnicode = function () { + unicodeEnabled = false + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.setGaugeThemeset = function (themes) { + this.gauge.setThemeset(themes) +} + +log.setGaugeTemplate = function (template) { + this.gauge.setTemplate(template) +} + +log.enableProgress = function () { + if (this.progressEnabled) return + this.progressEnabled = true + this.tracker.on('change', this.showProgress) + if (this._pause) return + this.gauge.enable() +} + +log.disableProgress = function () { + if (!this.progressEnabled) return + this.progressEnabled = false + this.tracker.removeListener('change', this.showProgress) + this.gauge.disable() +} + +var trackerConstructors = ['newGroup', 'newItem', 'newStream'] + +var mixinLog = function (tracker) { + // mixin the public methods from log into the tracker + // (except: conflicts and one's we handle specially) + Object.keys(log).forEach(function (P) { + if (P[0] === '_') return + if (trackerConstructors.filter(function (C) { return C === P }).length) return + if (tracker[P]) return + if (typeof log[P] !== 'function') return + var func = log[P] + tracker[P] = function () { + return func.apply(log, arguments) + } + }) + // if the new tracker is a group, make sure any subtrackers get + // mixed in too + if (tracker instanceof Progress.TrackerGroup) { + trackerConstructors.forEach(function (C) { + var func = tracker[C] + tracker[C] = function () { return mixinLog(func.apply(tracker, arguments)) } + }) + } + return tracker +} + +// Add tracker constructors to the top level log object +trackerConstructors.forEach(function (C) { + log[C] = function () { return mixinLog(this.tracker[C].apply(this.tracker, arguments)) } +}) + +log.clearProgress = function (cb) { + if (!this.progressEnabled) return cb && process.nextTick(cb) + this.gauge.hide(cb) +} + +log.showProgress = function (name, completed) { + if (!this.progressEnabled) return + var values = {} + if (name) values.section = name + var last = log.record[log.record.length - 1] + if (last) { + values.subsection = last.prefix + var disp = log.disp[last.level] || last.level + var logline = this._format(disp, log.style[last.level]) + if (last.prefix) logline += ' ' + this._format(last.prefix, this.prefixStyle) + logline += ' ' + last.message.split(/\r?\n/)[0] + values.logline = logline + } + values.completed = completed || this.tracker.completed() + this.gauge.show(values) +}.bind(log) // bind for use in tracker's 
on-change listener + +// temporarily stop emitting, but don't drop +log.pause = function () { + this._paused = true + if (this.progressEnabled) this.gauge.disable() +} + +log.resume = function () { + if (!this._paused) return + this._paused = false + + var b = this._buffer + this._buffer = [] + b.forEach(function (m) { + this.emitLog(m) + }, this) + if (this.progressEnabled) this.gauge.enable() +} + +log._buffer = [] + +var id = 0 +log.record = [] +log.maxRecordSize = 10000 +log.log = function (lvl, prefix, message) { + var l = this.levels[lvl] + if (l === undefined) { + return this.emit('error', new Error(util.format( + 'Undefined log level: %j', lvl))) + } + + var a = new Array(arguments.length - 2) + var stack = null + for (var i = 2; i < arguments.length; i++) { + var arg = a[i - 2] = arguments[i] + + // resolve stack traces to a plain string. + if (typeof arg === 'object' && arg && + (arg instanceof Error) && arg.stack) { + + Object.defineProperty(arg, 'stack', { + value: stack = arg.stack + '', + enumerable: true, + writable: true + }) + } + } + if (stack) a.unshift(stack + '\n') + message = util.format.apply(util, a) + + var m = { id: id++, + level: lvl, + prefix: String(prefix || ''), + message: message, + messageRaw: a } + + this.emit('log', m) + this.emit('log.' + lvl, m) + if (m.prefix) this.emit(m.prefix, m) + + this.record.push(m) + var mrs = this.maxRecordSize + var n = this.record.length - mrs + if (n > mrs / 10) { + var newSize = Math.floor(mrs * 0.9) + this.record = this.record.slice(-1 * newSize) + } + + this.emitLog(m) +}.bind(log) + +log.emitLog = function (m) { + if (this._paused) { + this._buffer.push(m) + return + } + if (this.progressEnabled) this.gauge.pulse(m.prefix) + var l = this.levels[m.level] + if (l === undefined) return + if (l < this.levels[this.level]) return + if (l > 0 && !isFinite(l)) return + + // If 'disp' is null or undefined, use the lvl as a default + // Allows: '', 0 as valid disp + var disp = log.disp[m.level] != null ? 
log.disp[m.level] : m.level + this.clearProgress() + m.message.split(/\r?\n/).forEach(function (line) { + if (this.heading) { + this.write(this.heading, this.headingStyle) + this.write(' ') + } + this.write(disp, log.style[m.level]) + var p = m.prefix || '' + if (p) this.write(' ') + this.write(p, this.prefixStyle) + this.write(' ' + line + '\n') + }, this) + this.showProgress() +} + +log._format = function (msg, style) { + if (!stream) return + + var output = '' + if (this.useColor()) { + style = style || {} + var settings = [] + if (style.fg) settings.push(style.fg) + if (style.bg) settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) + if (style.bold) settings.push('bold') + if (style.underline) settings.push('underline') + if (style.inverse) settings.push('inverse') + if (settings.length) output += consoleControl.color(settings) + if (style.beep) output += consoleControl.beep() + } + output += msg + if (this.useColor()) { + output += consoleControl.color('reset') + } + return output +} + +log.write = function (msg, style) { + if (!stream) return + + stream.write(this._format(msg, style)) +} + +log.addLevel = function (lvl, n, style, disp) { + // If 'disp' is null or undefined, use the lvl as a default + if (disp == null) disp = lvl + this.levels[lvl] = n + this.style[lvl] = style + if (!this[lvl]) { + this[lvl] = function () { + var a = new Array(arguments.length + 1) + a[0] = lvl + for (var i = 0; i < arguments.length; i++) { + a[i + 1] = arguments[i] + } + return this.log.apply(this, a) + }.bind(this) + } + this.disp[lvl] = disp +} + +log.prefixStyle = { fg: 'magenta' } +log.headingStyle = { fg: 'white', bg: 'black' } + +log.style = {} +log.levels = {} +log.disp = {} +log.addLevel('silly', -Infinity, { inverse: true }, 'sill') +log.addLevel('verbose', 1000, { fg: 'blue', bg: 'black' }, 'verb') +log.addLevel('info', 2000, { fg: 'green' }) +log.addLevel('timing', 2500, { fg: 'green', bg: 'black' }) +log.addLevel('http', 3000, { fg: 'green', bg: 'black' }) +log.addLevel('notice', 3500, { fg: 'blue', bg: 'black' }) +log.addLevel('warn', 4000, { fg: 'black', bg: 'yellow' }, 'WARN') +log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!') +log.addLevel('silent', Infinity) + +// allow 'error' prefix +log.on('error', function () {}) diff --git a/node_modules/npmlog/package.json b/node_modules/npmlog/package.json new file mode 100644 index 00000000..7220f8e7 --- /dev/null +++ b/node_modules/npmlog/package.json @@ -0,0 +1,28 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "npmlog", + "description": "logger for npm", + "version": "4.1.2", + "repository": { + "type": "git", + "url": "https://github.com/npm/npmlog.git" + }, + "main": "log.js", + "files": [ + "log.js" + ], + "scripts": { + "test": "standard && tap test/*.js" + }, + "dependencies": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + }, + "devDependencies": { + "standard": "~7.1.2", + "tap": "~5.7.3" + }, + "license": "ISC" +} diff --git a/node_modules/number-is-nan/index.js b/node_modules/number-is-nan/index.js new file mode 100644 index 00000000..79be4b9c --- /dev/null +++ b/node_modules/number-is-nan/index.js @@ -0,0 +1,4 @@ +'use strict'; +module.exports = Number.isNaN || function (x) { + return x !== x; +}; diff --git a/node_modules/number-is-nan/license b/node_modules/number-is-nan/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/number-is-nan/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
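The three-line `number-is-nan` ponyfill above works because `NaN` is the only JavaScript value that is not strictly equal to itself; a quick sketch of that property:

```js
// NaN is the only value for which strict self-comparison fails,
// so `x !== x` is a faithful Number.isNaN substitute on old engines.
console.log(NaN === NaN);          //=> false
const isNaNPonyfill = x => x !== x;
console.log(isNaNPonyfill(NaN));   //=> true
console.log(isNaNPonyfill('foo')); //=> false (whereas global isNaN('foo') is true)
```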
diff --git a/node_modules/number-is-nan/package.json b/node_modules/number-is-nan/package.json new file mode 100644 index 00000000..d2f51d4b --- /dev/null +++ b/node_modules/number-is-nan/package.json @@ -0,0 +1,35 @@ +{ + "name": "number-is-nan", + "version": "1.0.1", + "description": "ES2015 Number.isNaN() ponyfill", + "license": "MIT", + "repository": "sindresorhus/number-is-nan", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "es2015", + "ecmascript", + "ponyfill", + "polyfill", + "shim", + "number", + "is", + "nan", + "not" + ], + "devDependencies": { + "ava": "*" + } +} diff --git a/node_modules/number-is-nan/readme.md b/node_modules/number-is-nan/readme.md new file mode 100644 index 00000000..24635087 --- /dev/null +++ b/node_modules/number-is-nan/readme.md @@ -0,0 +1,28 @@ +# number-is-nan [![Build Status](https://travis-ci.org/sindresorhus/number-is-nan.svg?branch=master)](https://travis-ci.org/sindresorhus/number-is-nan) + +> ES2015 [`Number.isNaN()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN) [ponyfill](https://ponyfill.com) + + +## Install + +``` +$ npm install --save number-is-nan +``` + + +## Usage + +```js +var numberIsNan = require('number-is-nan'); + +numberIsNan(NaN); +//=> true + +numberIsNan('unicorn'); +//=> false +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/object-assign/index.js b/node_modules/object-assign/index.js new file mode 100644 index 00000000..0930cf88 --- /dev/null +++ b/node_modules/object-assign/index.js @@ -0,0 +1,90 @@ +/* +object-assign +(c) Sindre Sorhus +@license MIT +*/ + +'use strict'; +/* eslint-disable no-unused-vars */ +var getOwnPropertySymbols = Object.getOwnPropertySymbols; +var hasOwnProperty = Object.prototype.hasOwnProperty; +var propIsEnumerable = Object.prototype.propertyIsEnumerable; + +function toObject(val) { + if (val === null || val === undefined) { + throw new TypeError('Object.assign cannot be called with null or undefined'); + } + + return Object(val); +} + +function shouldUseNative() { + try { + if (!Object.assign) { + return false; + } + + // Detect buggy property enumeration order in older V8 versions. + + // https://bugs.chromium.org/p/v8/issues/detail?id=4118 + var test1 = new String('abc'); // eslint-disable-line no-new-wrappers + test1[5] = 'de'; + if (Object.getOwnPropertyNames(test1)[0] === '5') { + return false; + } + + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test2 = {}; + for (var i = 0; i < 10; i++) { + test2['_' + String.fromCharCode(i)] = i; + } + var order2 = Object.getOwnPropertyNames(test2).map(function (n) { + return test2[n]; + }); + if (order2.join('') !== '0123456789') { + return false; + } + + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test3 = {}; + 'abcdefghijklmnopqrst'.split('').forEach(function (letter) { + test3[letter] = letter; + }); + if (Object.keys(Object.assign({}, test3)).join('') !== + 'abcdefghijklmnopqrst') { + return false; + } + + return true; + } catch (err) { + // We don't expect any of the above to throw, but better to be safe. + return false; + } +} + +module.exports = shouldUseNative() ? 
Object.assign : function (target, source) { + var from; + var to = toObject(target); + var symbols; + + for (var s = 1; s < arguments.length; s++) { + from = Object(arguments[s]); + + for (var key in from) { + if (hasOwnProperty.call(from, key)) { + to[key] = from[key]; + } + } + + if (getOwnPropertySymbols) { + symbols = getOwnPropertySymbols(from); + for (var i = 0; i < symbols.length; i++) { + if (propIsEnumerable.call(from, symbols[i])) { + to[symbols[i]] = from[symbols[i]]; + } + } + } + } + + return to; +}; diff --git a/node_modules/object-assign/license b/node_modules/object-assign/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/object-assign/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/object-assign/package.json b/node_modules/object-assign/package.json new file mode 100644 index 00000000..503eb1e6 --- /dev/null +++ b/node_modules/object-assign/package.json @@ -0,0 +1,42 @@ +{ + "name": "object-assign", + "version": "4.1.1", + "description": "ES2015 `Object.assign()` ponyfill", + "license": "MIT", + "repository": "sindresorhus/object-assign", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava", + "bench": "matcha bench.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "object", + "assign", + "extend", + "properties", + "es2015", + "ecmascript", + "harmony", + "ponyfill", + "prollyfill", + "polyfill", + "shim", + "browser" + ], + "devDependencies": { + "ava": "^0.16.0", + "lodash": "^4.16.4", + "matcha": "^0.7.0", + "xo": "^0.16.0" + } +} diff --git a/node_modules/object-assign/readme.md b/node_modules/object-assign/readme.md new file mode 100644 index 00000000..1be09d35 --- /dev/null +++ b/node_modules/object-assign/readme.md @@ -0,0 +1,61 @@ +# object-assign [![Build Status](https://travis-ci.org/sindresorhus/object-assign.svg?branch=master)](https://travis-ci.org/sindresorhus/object-assign) + +> ES2015 [`Object.assign()`](http://www.2ality.com/2014/01/object-assign.html) [ponyfill](https://ponyfill.com) + + +## Use the built-in + +Node.js 4 and up, as well as every evergreen browser (Chrome, Edge, Firefox, Opera, Safari), +support `Object.assign()` :tada:. If you target only those environments, then by all +means, use `Object.assign()` instead of this package. 
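One detail worth noting from the fallback implementation above: it copies own enumerable symbol-keyed properties (when `Object.getOwnPropertySymbols` exists) and skips non-enumerable ones, matching native `Object.assign`. A small sketch of that behavior, with made-up keys:

```js
// Sketch: key names are illustrative. Behaves the same with native Object.assign.
const objectAssign = require('object-assign');

const tag = Symbol('tag');
const source = {foo: 1, [tag]: 'copied too'};

const target = objectAssign({}, source);
console.log(target.foo);  //=> 1
console.log(target[tag]); //=> 'copied too'

// Non-enumerable properties are skipped, since the for...in / hasOwnProperty
// loop in the fallback only ever sees enumerable own keys.
const hidden = Object.defineProperty({}, 'secret', {value: 42, enumerable: false});
console.log(objectAssign({}, hidden).secret); //=> undefined
```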
+ + +## Install + +``` +$ npm install --save object-assign +``` + + +## Usage + +```js +const objectAssign = require('object-assign'); + +objectAssign({foo: 0}, {bar: 1}); +//=> {foo: 0, bar: 1} + +// multiple sources +objectAssign({foo: 0}, {bar: 1}, {baz: 2}); +//=> {foo: 0, bar: 1, baz: 2} + +// overwrites equal keys +objectAssign({foo: 0}, {foo: 1}, {foo: 2}); +//=> {foo: 2} + +// ignores null and undefined sources +objectAssign({foo: 0}, null, {bar: 1}, undefined); +//=> {foo: 0, bar: 1} +``` + + +## API + +### objectAssign(target, [source, ...]) + +Assigns enumerable own properties of `source` objects to the `target` object and returns the `target` object. Additional `source` objects will overwrite previous ones. + + +## Resources + +- [ES2015 spec - Object.assign](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-object.assign) + + +## Related + +- [deep-assign](https://github.com/sindresorhus/deep-assign) - Recursive `Object.assign()` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/object-copy/LICENSE b/node_modules/object-copy/LICENSE new file mode 100644 index 00000000..e28e6032 --- /dev/null +++ b/node_modules/object-copy/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/object-copy/index.js b/node_modules/object-copy/index.js new file mode 100644 index 00000000..f9faa223 --- /dev/null +++ b/node_modules/object-copy/index.js @@ -0,0 +1,174 @@ +'use strict'; + +var typeOf = require('kind-of'); +var copyDescriptor = require('copy-descriptor'); +var define = require('define-property'); + +/** + * Copy static properties, prototype properties, and descriptors from one object to another. 
+ * + * ```js + * function App() {} + * var proto = App.prototype; + * App.prototype.set = function() {}; + * App.prototype.get = function() {}; + * + * var obj = {}; + * copy(obj, proto); + * ``` + * @param {Object} `receiver` + * @param {Object} `provider` + * @param {String|Array} `omit` One or more properties to omit + * @return {Object} + * @api public + */ + +function copy(receiver, provider, omit) { + if (!isObject(receiver)) { + throw new TypeError('expected receiving object to be an object.'); + } + if (!isObject(provider)) { + throw new TypeError('expected providing object to be an object.'); + } + + var props = nativeKeys(provider); + var keys = Object.keys(provider); + var len = props.length; + omit = arrayify(omit); + + while (len--) { + var key = props[len]; + + if (has(keys, key)) { + define(receiver, key, provider[key]); + } else if (!(key in receiver) && !has(omit, key)) { + copyDescriptor(receiver, provider, key); + } + } +}; + +/** + * Return true if the given value is an object or function + */ + +function isObject(val) { + return typeOf(val) === 'object' || typeof val === 'function'; +} + +/** + * Returns true if an array has any of the given elements, or an + * object has any of the give keys. + * + * ```js + * has(['a', 'b', 'c'], 'c'); + * //=> true + * + * has(['a', 'b', 'c'], ['c', 'z']); + * //=> true + * + * has({a: 'b', c: 'd'}, ['c', 'z']); + * //=> true + * ``` + * @param {Object} `obj` + * @param {String|Array} `val` + * @return {Boolean} + */ + +function has(obj, val) { + val = arrayify(val); + var len = val.length; + + if (isObject(obj)) { + for (var key in obj) { + if (val.indexOf(key) > -1) { + return true; + } + } + + var keys = nativeKeys(obj); + return has(keys, val); + } + + if (Array.isArray(obj)) { + var arr = obj; + while (len--) { + if (arr.indexOf(val[len]) > -1) { + return true; + } + } + return false; + } + + throw new TypeError('expected an array or object.'); +} + +/** + * Cast the given value to an array. + * + * ```js + * arrayify('foo'); + * //=> ['foo'] + * + * arrayify(['foo']); + * //=> ['foo'] + * ``` + * + * @param {String|Array} `val` + * @return {Array} + */ + +function arrayify(val) { + return val ? (Array.isArray(val) ? val : [val]) : []; +} + +/** + * Returns true if a value has a `contructor` + * + * ```js + * hasConstructor({}); + * //=> true + * + * hasConstructor(Object.create(null)); + * //=> false + * ``` + * @param {Object} `value` + * @return {Boolean} + */ + +function hasConstructor(val) { + return isObject(val) && typeof val.constructor !== 'undefined'; +} + +/** + * Get the native `ownPropertyNames` from the constructor of the + * given `object`. An empty array is returned if the object does + * not have a constructor. + * + * ```js + * nativeKeys({a: 'b', b: 'c', c: 'd'}) + * //=> ['a', 'b', 'c'] + * + * nativeKeys(function(){}) + * //=> ['length', 'caller'] + * ``` + * + * @param {Object} `obj` Object that has a `constructor`. + * @return {Array} Array of keys. 
+ */ + +function nativeKeys(val) { + if (!hasConstructor(val)) return []; + return Object.getOwnPropertyNames(val); +} + +/** + * Expose `copy` + */ + +module.exports = copy; + +/** + * Expose `copy.has` for tests + */ + +module.exports.has = has; diff --git a/node_modules/object-copy/package.json b/node_modules/object-copy/package.json new file mode 100644 index 00000000..f02a96cd --- /dev/null +++ b/node_modules/object-copy/package.json @@ -0,0 +1,47 @@ +{ + "name": "object-copy", + "description": "Copy static properties, prototype properties, and descriptors from one object to another.", + "version": "0.1.0", + "homepage": "https://github.com/jonschlinkert/object-copy", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/object-copy", + "bugs": { + "url": "https://github.com/jonschlinkert/object-copy/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "copy-descriptor": "^0.1.0", + "define-property": "^0.2.5", + "kind-of": "^3.0.3" + }, + "devDependencies": { + "gulp-format-md": "*", + "mocha": "*" + }, + "keywords": [ + "copy", + "object" + ], + "verb": { + "layout": "default", + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [] + }, + "reflinks": [ + "verb" + ] + } +} diff --git a/node_modules/object-visit/LICENSE b/node_modules/object-visit/LICENSE new file mode 100644 index 00000000..ec85897e --- /dev/null +++ b/node_modules/object-visit/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015, 2017, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/object-visit/README.md b/node_modules/object-visit/README.md new file mode 100644 index 00000000..64015cb9 --- /dev/null +++ b/node_modules/object-visit/README.md @@ -0,0 +1,83 @@ +# object-visit [![NPM version](https://img.shields.io/npm/v/object-visit.svg?style=flat)](https://www.npmjs.com/package/object-visit) [![NPM monthly downloads](https://img.shields.io/npm/dm/object-visit.svg?style=flat)](https://npmjs.org/package/object-visit) [![NPM total downloads](https://img.shields.io/npm/dt/object-visit.svg?style=flat)](https://npmjs.org/package/object-visit) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/object-visit.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/object-visit) + +> Call a specified method on each value in the given object. 
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save object-visit +``` + +## Usage + +```js +var visit = require('object-visit'); + +var ctx = { + data: {}, + set: function (key, value) { + if (typeof key === 'object') { + visit(ctx, 'set', key); + } else { + ctx.data[key] = value; + } + } +}; + +ctx.set('a', 'a'); +ctx.set('b', 'b'); +ctx.set('c', 'c'); +ctx.set({d: {e: 'f'}}); + +console.log(ctx.data); +//=> {a: 'a', b: 'b', c: 'c', d: { e: 'f' }}; +``` + +## About + +### Related projects + +* [base-methods](https://www.npmjs.com/package/base-methods): base-methods is the foundation for creating modular, unit testable and highly pluggable node.js applications, starting… [more](https://github.com/jonschlinkert/base-methods) | [homepage](https://github.com/jonschlinkert/base-methods "base-methods is the foundation for creating modular, unit testable and highly pluggable node.js applications, starting with a handful of common methods, like `set`, `get`, `del` and `use`.") +* [collection-visit](https://www.npmjs.com/package/collection-visit): Visit a method over the items in an object, or map visit over the objects… [more](https://github.com/jonschlinkert/collection-visit) | [homepage](https://github.com/jonschlinkert/collection-visit "Visit a method over the items in an object, or map visit over the objects in an array.") +* [define-property](https://www.npmjs.com/package/define-property): Define a non-enumerable property on an object. | [homepage](https://github.com/jonschlinkert/define-property "Define a non-enumerable property on an object.") +* [map-visit](https://www.npmjs.com/package/map-visit): Map `visit` over an array of objects. | [homepage](https://github.com/jonschlinkert/map-visit "Map `visit` over an array of objects.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on May 30, 2017._ \ No newline at end of file diff --git a/node_modules/object-visit/index.js b/node_modules/object-visit/index.js new file mode 100644 index 00000000..fcaeda92 --- /dev/null +++ b/node_modules/object-visit/index.js @@ -0,0 +1,33 @@ +/*! + * object-visit + * + * Copyright (c) 2015, 2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +var isObject = require('isobject'); + +module.exports = function visit(thisArg, method, target, val) { + if (!isObject(thisArg) && typeof thisArg !== 'function') { + throw new Error('object-visit expects `thisArg` to be an object.'); + } + + if (typeof method !== 'string') { + throw new Error('object-visit expects `method` name to be a string'); + } + + if (typeof thisArg[method] !== 'function') { + return thisArg; + } + + var args = [].slice.call(arguments, 3); + target = target || {}; + + for (var key in target) { + var arr = [key, target[key]].concat(args); + thisArg[method].apply(thisArg, arr); + } + return thisArg; +}; diff --git a/node_modules/object-visit/package.json b/node_modules/object-visit/package.json new file mode 100644 index 00000000..15fd0ff8 --- /dev/null +++ b/node_modules/object-visit/package.json @@ -0,0 +1,65 @@ +{ + "name": "object-visit", + "description": "Call a specified method on each value in the given object.", + "version": "1.0.1", + "homepage": "https://github.com/jonschlinkert/object-visit", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/object-visit", + "bugs": { + "url": "https://github.com/jonschlinkert/object-visit/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "isobject": "^3.0.0" + }, + "devDependencies": { + "gulp": "^3.9.1", + "gulp-eslint": "^3.0.1", + "gulp-format-md": "^0.1.12", + "gulp-istanbul": "^1.1.1", + "gulp-mocha": "^3.0.0", + "mocha": "^3.2.0" + }, + "keywords": [ + "context", + "function", + "helper", + "key", + "method", + "object", + "value", + "visit", + "visitor" + ], + "verb": { + "related": { + "list": [ + "base-methods", + "collection-visit", + "define-property", + "map-visit" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/object.pick/LICENSE b/node_modules/object.pick/LICENSE new file mode 100644 index 00000000..39245ac1 --- /dev/null +++ b/node_modules/object.pick/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
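One behaviour of the object-visit implementation above that its readme does not show: any arguments passed after `target` are forwarded to every method call. A small sketch, with a purely illustrative `suffix` parameter:

```js
var visit = require('object-visit');

var ctx = {
  data: {},
  // `suffix` receives the extra argument forwarded by visit()
  set: function (key, value, suffix) {
    ctx.data[key] = value + suffix;
  }
};

// The fourth argument ('!') is passed along to every ctx.set() call.
visit(ctx, 'set', {a: 1, b: 2}, '!');

console.log(ctx.data);
//=> { a: '1!', b: '2!' }
```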
diff --git a/node_modules/object.pick/README.md b/node_modules/object.pick/README.md new file mode 100644 index 00000000..48f74534 --- /dev/null +++ b/node_modules/object.pick/README.md @@ -0,0 +1,76 @@ +# object.pick [![NPM version](https://img.shields.io/npm/v/object.pick.svg?style=flat)](https://www.npmjs.com/package/object.pick) [![NPM monthly downloads](https://img.shields.io/npm/dm/object.pick.svg?style=flat)](https://npmjs.org/package/object.pick) [![NPM total downloads](https://img.shields.io/npm/dt/object.pick.svg?style=flat)](https://npmjs.org/package/object.pick) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/object.pick.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/object.pick) + +> Returns a filtered copy of an object with only the specified keys, similar to `_.pick` from lodash / underscore. + +You might also be interested in [object.omit](https://github.com/jonschlinkert/object.omit). + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save object.pick +``` + +## benchmarks + +This is the [fastest implementation](http://jsperf.com/pick-props) I tested. Pull requests welcome! + +## Usage + +```js +var pick = require('object.pick'); + +pick({a: 'a', b: 'b'}, 'a') +//=> {a: 'a'} + +pick({a: 'a', b: 'b', c: 'c'}, ['a', 'b']) +//=> {a: 'a', b: 'b'} +``` + +## About + +### Related projects + +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow "Extend an object with the properties of additional objects. node.js/javascript util.") +* [get-value](https://www.npmjs.com/package/get-value): Use property paths (`a.b.c`) to get a nested value from an object. | [homepage](https://github.com/jonschlinkert/get-value "Use property paths (`a.b.c`) to get a nested value from an object.") +* [mixin-deep](https://www.npmjs.com/package/mixin-deep): Deeply mix the properties of objects into the first object. Like merge-deep, but doesn't clone. | [homepage](https://github.com/jonschlinkert/mixin-deep "Deeply mix the properties of objects into the first object. Like merge-deep, but doesn't clone.") +* [set-value](https://www.npmjs.com/package/set-value): Create nested values and any intermediaries using dot notation (`'a.b.c'`) paths. | [homepage](https://github.com/jonschlinkert/set-value "Create nested values and any intermediaries using dot notation (`'a.b.c'`) paths.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ + +To generate the readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install -g verb verb-generate-readme && verb +``` + +### Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +### License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). 
+Released under the [MIT license](https://github.com/jonschlinkert/object.pick/blob/master/LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.2.0, on October 27, 2016._ \ No newline at end of file diff --git a/node_modules/object.pick/index.js b/node_modules/object.pick/index.js new file mode 100644 index 00000000..0ce01782 --- /dev/null +++ b/node_modules/object.pick/index.js @@ -0,0 +1,35 @@ +/*! + * object.pick + * + * Copyright (c) 2014-2015 Jon Schlinkert, contributors. + * Licensed under the MIT License + */ + +'use strict'; + +var isObject = require('isobject'); + +module.exports = function pick(obj, keys) { + if (!isObject(obj) && typeof obj !== 'function') { + return {}; + } + + var res = {}; + if (typeof keys === 'string') { + if (keys in obj) { + res[keys] = obj[keys]; + } + return res; + } + + var len = keys.length; + var idx = -1; + + while (++idx < len) { + var key = keys[idx]; + if (key in obj) { + res[key] = obj[key]; + } + } + return res; +}; diff --git a/node_modules/object.pick/package.json b/node_modules/object.pick/package.json new file mode 100644 index 00000000..b655dbeb --- /dev/null +++ b/node_modules/object.pick/package.json @@ -0,0 +1,60 @@ +{ + "name": "object.pick", + "description": "Returns a filtered copy of an object with only the specified keys, similar to `_.pick` from lodash / underscore.", + "version": "1.3.0", + "homepage": "https://github.com/jonschlinkert/object.pick", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/object.pick", + "bugs": { + "url": "https://github.com/jonschlinkert/object.pick/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "isobject": "^3.0.1" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.1.2", + "vinyl": "^2.0.0" + }, + "keywords": [ + "object", + "pick" + ], + "verb": { + "run": true, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "extend-shallow", + "get-value", + "mixin-deep", + "set-value" + ], + "highlight": "object.omit" + }, + "reflinks": [ + "verb" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/on-finished/HISTORY.md b/node_modules/on-finished/HISTORY.md new file mode 100644 index 00000000..98ff0e99 --- /dev/null +++ b/node_modules/on-finished/HISTORY.md @@ -0,0 +1,88 @@ +2.3.0 / 2015-05-26 +================== + + * Add defined behavior for HTTP `CONNECT` requests + * Add defined behavior for HTTP `Upgrade` requests + * deps: ee-first@1.1.1 + +2.2.1 / 2015-04-22 +================== + + * Fix `isFinished(req)` when data buffered + +2.2.0 / 2014-12-22 +================== + + * Add message object to callback arguments + +2.1.1 / 2014-10-22 +================== + + * Fix handling of pipelined requests + +2.1.0 / 2014-08-16 +================== + + * Check if `socket` is detached + * Return `undefined` for `isFinished` if state unknown + +2.0.0 / 2014-08-16 +================== + + * Add `isFinished` function + * Move to `jshttp` organization + * Remove support for plain socket argument + * Rename to `on-finished` + * Support both `req` and `res` as arguments + * deps: ee-first@1.0.5 + +1.2.2 / 2014-06-10 +================== + + * Reduce listeners added to emitters + - avoids "event emitter leak" warnings when used multiple times on 
same request + +1.2.1 / 2014-06-08 +================== + + * Fix returned value when already finished + +1.2.0 / 2014-06-05 +================== + + * Call callback when called on already-finished socket + +1.1.4 / 2014-05-27 +================== + + * Support node.js 0.8 + +1.1.3 / 2014-04-30 +================== + + * Make sure errors passed as instanceof `Error` + +1.1.2 / 2014-04-18 +================== + + * Default the `socket` to passed-in object + +1.1.1 / 2014-01-16 +================== + + * Rename module to `finished` + +1.1.0 / 2013-12-25 +================== + + * Call callback when called on already-errored socket + +1.0.1 / 2013-12-20 +================== + + * Actually pass the error to the callback + +1.0.0 / 2013-12-20 +================== + + * Initial release diff --git a/node_modules/on-finished/LICENSE b/node_modules/on-finished/LICENSE new file mode 100644 index 00000000..5931fd23 --- /dev/null +++ b/node_modules/on-finished/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2013 Jonathan Ong +Copyright (c) 2014 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/on-finished/README.md b/node_modules/on-finished/README.md new file mode 100644 index 00000000..a0e11574 --- /dev/null +++ b/node_modules/on-finished/README.md @@ -0,0 +1,154 @@ +# on-finished + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Execute a callback when a HTTP request closes, finishes, or errors. + +## Install + +```sh +$ npm install on-finished +``` + +## API + +```js +var onFinished = require('on-finished') +``` + +### onFinished(res, listener) + +Attach a listener to listen for the response to finish. The listener will +be invoked only once when the response finished. If the response finished +to an error, the first argument will contain the error. If the response +has already finished, the listener will be invoked. + +Listening to the end of a response would be used to close things associated +with the response, like open files. + +Listener is invoked as `listener(err, res)`. + +```js +onFinished(res, function (err, res) { + // clean up open fds, etc. + // err contains the error is request error'd +}) +``` + +### onFinished(req, listener) + +Attach a listener to listen for the request to finish. The listener will +be invoked only once when the request finished. 
If the request finished +to an error, the first argument will contain the error. If the request +has already finished, the listener will be invoked. + +Listening to the end of a request would be used to know when to continue +after reading the data. + +Listener is invoked as `listener(err, req)`. + +```js +var data = '' + +req.setEncoding('utf8') +req.on('data', function (str) { + data += str +}) + +onFinished(req, function (err, req) { + // data is read unless there is err +}) +``` + +### onFinished.isFinished(res) + +Determine if `res` is already finished. This would be useful to check and +not even start certain operations if the response has already finished. + +### onFinished.isFinished(req) + +Determine if `req` is already finished. This would be useful to check and +not even start certain operations if the request has already finished. + +## Special Node.js requests + +### HTTP CONNECT method + +The meaning of the `CONNECT` method from RFC 7231, section 4.3.6: + +> The CONNECT method requests that the recipient establish a tunnel to +> the destination origin server identified by the request-target and, +> if successful, thereafter restrict its behavior to blind forwarding +> of packets, in both directions, until the tunnel is closed. Tunnels +> are commonly used to create an end-to-end virtual connection, through +> one or more proxies, which can then be secured using TLS (Transport +> Layer Security, [RFC5246]). + +In Node.js, these request objects come from the `'connect'` event on +the HTTP server. + +When this module is used on a HTTP `CONNECT` request, the request is +considered "finished" immediately, **due to limitations in the Node.js +interface**. This means if the `CONNECT` request contains a request entity, +the request will be considered "finished" even before it has been read. + +There is no such thing as a response object to a `CONNECT` request in +Node.js, so there is no support for one. + +### HTTP Upgrade request + +The meaning of the `Upgrade` header from RFC 7230, section 6.1: + +> The "Upgrade" header field is intended to provide a simple mechanism +> for transitioning from HTTP/1.1 to some other protocol on the same +> connection. + +In Node.js, these request objects come from the `'upgrade'` event on +the HTTP server. + +When this module is used on a HTTP request with an `Upgrade` header, the +request is considered "finished" immediately, **due to limitations in the +Node.js interface**. This means if the `Upgrade` request contains a request +entity, the request will be considered "finished" even before it has been +read. + +There is no such thing as a response object to an `Upgrade` request in +Node.js, so there is no support for one. + +## Example + +The following code ensures that file descriptors are always closed +once the response finishes.
+ +```js +var destroy = require('destroy') +var http = require('http') +var onFinished = require('on-finished') + +http.createServer(function onRequest(req, res) { + var stream = fs.createReadStream('package.json') + stream.pipe(res) + onFinished(res, function (err) { + destroy(stream) + }) +}) +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/on-finished.svg +[npm-url]: https://npmjs.org/package/on-finished +[node-version-image]: https://img.shields.io/node/v/on-finished.svg +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/jshttp/on-finished/master.svg +[travis-url]: https://travis-ci.org/jshttp/on-finished +[coveralls-image]: https://img.shields.io/coveralls/jshttp/on-finished/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/on-finished?branch=master +[downloads-image]: https://img.shields.io/npm/dm/on-finished.svg +[downloads-url]: https://npmjs.org/package/on-finished diff --git a/node_modules/on-finished/index.js b/node_modules/on-finished/index.js new file mode 100644 index 00000000..9abd98f9 --- /dev/null +++ b/node_modules/on-finished/index.js @@ -0,0 +1,196 @@ +/*! + * on-finished + * Copyright(c) 2013 Jonathan Ong + * Copyright(c) 2014 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = onFinished +module.exports.isFinished = isFinished + +/** + * Module dependencies. + * @private + */ + +var first = require('ee-first') + +/** + * Variables. + * @private + */ + +/* istanbul ignore next */ +var defer = typeof setImmediate === 'function' + ? setImmediate + : function(fn){ process.nextTick(fn.bind.apply(fn, arguments)) } + +/** + * Invoke callback when the response has finished, useful for + * cleaning up resources afterwards. + * + * @param {object} msg + * @param {function} listener + * @return {object} + * @public + */ + +function onFinished(msg, listener) { + if (isFinished(msg) !== false) { + defer(listener, null, msg) + return msg + } + + // attach the listener to the message + attachListener(msg, listener) + + return msg +} + +/** + * Determine if message is already finished. + * + * @param {object} msg + * @return {boolean} + * @public + */ + +function isFinished(msg) { + var socket = msg.socket + + if (typeof msg.finished === 'boolean') { + // OutgoingMessage + return Boolean(msg.finished || (socket && !socket.writable)) + } + + if (typeof msg.complete === 'boolean') { + // IncomingMessage + return Boolean(msg.upgrade || !socket || !socket.readable || (msg.complete && !msg.readable)) + } + + // don't know + return undefined +} + +/** + * Attach a finished listener to the message. 
+ * + * @param {object} msg + * @param {function} callback + * @private + */ + +function attachFinishedListener(msg, callback) { + var eeMsg + var eeSocket + var finished = false + + function onFinish(error) { + eeMsg.cancel() + eeSocket.cancel() + + finished = true + callback(error) + } + + // finished on first message event + eeMsg = eeSocket = first([[msg, 'end', 'finish']], onFinish) + + function onSocket(socket) { + // remove listener + msg.removeListener('socket', onSocket) + + if (finished) return + if (eeMsg !== eeSocket) return + + // finished on first socket event + eeSocket = first([[socket, 'error', 'close']], onFinish) + } + + if (msg.socket) { + // socket already assigned + onSocket(msg.socket) + return + } + + // wait for socket to be assigned + msg.on('socket', onSocket) + + if (msg.socket === undefined) { + // node.js 0.8 patch + patchAssignSocket(msg, onSocket) + } +} + +/** + * Attach the listener to the message. + * + * @param {object} msg + * @return {function} + * @private + */ + +function attachListener(msg, listener) { + var attached = msg.__onFinished + + // create a private single listener with queue + if (!attached || !attached.queue) { + attached = msg.__onFinished = createListener(msg) + attachFinishedListener(msg, attached) + } + + attached.queue.push(listener) +} + +/** + * Create listener on message. + * + * @param {object} msg + * @return {function} + * @private + */ + +function createListener(msg) { + function listener(err) { + if (msg.__onFinished === listener) msg.__onFinished = null + if (!listener.queue) return + + var queue = listener.queue + listener.queue = null + + for (var i = 0; i < queue.length; i++) { + queue[i](err, msg) + } + } + + listener.queue = [] + + return listener +} + +/** + * Patch ServerResponse.prototype.assignSocket for node.js 0.8. + * + * @param {ServerResponse} res + * @param {function} callback + * @private + */ + +function patchAssignSocket(res, callback) { + var assignSocket = res.assignSocket + + if (typeof assignSocket !== 'function') return + + // res.on('socket', callback) is broken in 0.8 + res.assignSocket = function _assignSocket(socket) { + assignSocket.call(this, socket) + callback(socket) + } +} diff --git a/node_modules/on-finished/package.json b/node_modules/on-finished/package.json new file mode 100644 index 00000000..b9df1bd2 --- /dev/null +++ b/node_modules/on-finished/package.json @@ -0,0 +1,31 @@ +{ + "name": "on-finished", + "description": "Execute a callback when a request closes, finishes, or errors", + "version": "2.3.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "jshttp/on-finished", + "dependencies": { + "ee-first": "1.1.1" + }, + "devDependencies": { + "istanbul": "0.3.9", + "mocha": "2.2.5" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/once/LICENSE b/node_modules/once/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/once/README.md b/node_modules/once/README.md new file mode 100644 index 00000000..1f1ffca9 --- /dev/null +++ b/node_modules/once/README.md @@ -0,0 +1,79 @@ +# once + +Only call a function once. + +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. + +To check whether your function has been called, use `fn.called`. Once the +function is called for the first time the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. + +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` + +## `once.strict(func)` + +Throw an error if the function is called twice. + +Some functions are expected to be called only once. Using `once` for them would +potentially hide logical errors.
+ +In the example below, the `greet` function has to call the callback only once: + +```javascript +function greet (name, cb) { + // return is missing from the if statement + // when no name is passed, the callback is called twice + if (!name) cb('Hello anonymous') + cb('Hello ' + name) +} + +function log (msg) { + console.log(msg) +} + +// this will print 'Hello anonymous' but the logical error will be missed +greet(null, once(msg)) + +// once.strict will print 'Hello anonymous' and throw an error when the callback will be called the second time +greet(null, once.strict(msg)) +``` diff --git a/node_modules/once/once.js b/node_modules/once/once.js new file mode 100644 index 00000000..23540673 --- /dev/null +++ b/node_modules/once/once.js @@ -0,0 +1,42 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} diff --git a/node_modules/once/package.json b/node_modules/once/package.json new file mode 100644 index 00000000..16815b2f --- /dev/null +++ b/node_modules/once/package.json @@ -0,0 +1,33 @@ +{ + "name": "once", + "version": "1.4.0", + "description": "Run a function exactly one time", + "main": "once.js", + "directories": { + "test": "test" + }, + "dependencies": { + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.0.1" + }, + "scripts": { + "test": "tap test/*.js" + }, + "files": [ + "once.js" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once" + }, + "keywords": [ + "once", + "function", + "one", + "single" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/os-homedir/index.js b/node_modules/os-homedir/index.js new file mode 100644 index 00000000..33066166 --- /dev/null +++ b/node_modules/os-homedir/index.js @@ -0,0 +1,24 @@ +'use strict'; +var os = require('os'); + +function homedir() { + var env = process.env; + var home = env.HOME; + var user = env.LOGNAME || env.USER || env.LNAME || env.USERNAME; + + if (process.platform === 'win32') { + return env.USERPROFILE || env.HOMEDRIVE + env.HOMEPATH || home || null; + } + + if (process.platform === 'darwin') { + return home || (user ? '/Users/' + user : null); + } + + if (process.platform === 'linux') { + return home || (process.getuid() === 0 ? '/root' : (user ? '/home/' + user : null)); + } + + return home || null; +} + +module.exports = typeof os.homedir === 'function' ? 
os.homedir : homedir; diff --git a/node_modules/os-homedir/license b/node_modules/os-homedir/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/os-homedir/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/os-homedir/package.json b/node_modules/os-homedir/package.json new file mode 100644 index 00000000..525b2251 --- /dev/null +++ b/node_modules/os-homedir/package.json @@ -0,0 +1,41 @@ +{ + "name": "os-homedir", + "version": "1.0.2", + "description": "Node.js 4 `os.homedir()` ponyfill", + "license": "MIT", + "repository": "sindresorhus/os-homedir", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "builtin", + "core", + "ponyfill", + "polyfill", + "shim", + "os", + "homedir", + "home", + "dir", + "directory", + "folder", + "user", + "path" + ], + "devDependencies": { + "ava": "*", + "path-exists": "^2.0.0", + "xo": "^0.16.0" + } +} diff --git a/node_modules/os-homedir/readme.md b/node_modules/os-homedir/readme.md new file mode 100644 index 00000000..856ae615 --- /dev/null +++ b/node_modules/os-homedir/readme.md @@ -0,0 +1,31 @@ +# os-homedir [![Build Status](https://travis-ci.org/sindresorhus/os-homedir.svg?branch=master)](https://travis-ci.org/sindresorhus/os-homedir) + +> Node.js 4 [`os.homedir()`](https://nodejs.org/api/os.html#os_os_homedir) [ponyfill](https://ponyfill.com) + + +## Install + +``` +$ npm install --save os-homedir +``` + + +## Usage + +```js +const osHomedir = require('os-homedir'); + +console.log(osHomedir()); +//=> '/Users/sindresorhus' +``` + + +## Related + +- [user-home](https://github.com/sindresorhus/user-home) - Same as this module but caches the result +- [home-or-tmp](https://github.com/sindresorhus/home-or-tmp) - Get the user home directory with fallback to the system temp directory + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/os-tmpdir/index.js b/node_modules/os-tmpdir/index.js new file mode 100644 index 00000000..2077b1ce --- /dev/null +++ b/node_modules/os-tmpdir/index.js @@ -0,0 +1,25 @@ +'use strict'; +var isWindows = process.platform === 'win32'; +var trailingSlashRe = isWindows ? 
/[^:]\\$/ : /.\/$/; + +// https://github.com/nodejs/node/blob/3e7a14381497a3b73dda68d05b5130563cdab420/lib/os.js#L25-L43 +module.exports = function () { + var path; + + if (isWindows) { + path = process.env.TEMP || + process.env.TMP || + (process.env.SystemRoot || process.env.windir) + '\\temp'; + } else { + path = process.env.TMPDIR || + process.env.TMP || + process.env.TEMP || + '/tmp'; + } + + if (trailingSlashRe.test(path)) { + path = path.slice(0, -1); + } + + return path; +}; diff --git a/node_modules/os-tmpdir/license b/node_modules/os-tmpdir/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/os-tmpdir/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/os-tmpdir/package.json b/node_modules/os-tmpdir/package.json new file mode 100644 index 00000000..180a3176 --- /dev/null +++ b/node_modules/os-tmpdir/package.json @@ -0,0 +1,41 @@ +{ + "name": "os-tmpdir", + "version": "1.0.2", + "description": "Node.js os.tmpdir() ponyfill", + "license": "MIT", + "repository": "sindresorhus/os-tmpdir", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "built-in", + "core", + "ponyfill", + "polyfill", + "shim", + "os", + "tmpdir", + "tempdir", + "tmp", + "temp", + "dir", + "directory", + "env", + "environment" + ], + "devDependencies": { + "ava": "*", + "xo": "^0.16.0" + } +} diff --git a/node_modules/os-tmpdir/readme.md b/node_modules/os-tmpdir/readme.md new file mode 100644 index 00000000..c09f7ed8 --- /dev/null +++ b/node_modules/os-tmpdir/readme.md @@ -0,0 +1,32 @@ +# os-tmpdir [![Build Status](https://travis-ci.org/sindresorhus/os-tmpdir.svg?branch=master)](https://travis-ci.org/sindresorhus/os-tmpdir) + +> Node.js [`os.tmpdir()`](https://nodejs.org/api/os.html#os_os_tmpdir) [ponyfill](https://ponyfill.com) + +Use this instead of `require('os').tmpdir()` to get a consistent behavior on different Node.js versions (even 0.8). + + +## Install + +``` +$ npm install --save os-tmpdir +``` + + +## Usage + +```js +const osTmpdir = require('os-tmpdir'); + +osTmpdir(); +//=> '/var/folders/m3/5574nnhn0yj488ccryqr7tc80000gn/T' +``` + + +## API + +See the [`os.tmpdir()` docs](https://nodejs.org/api/os.html#os_os_tmpdir). 
+ + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/osenv/LICENSE b/node_modules/osenv/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/osenv/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/osenv/README.md b/node_modules/osenv/README.md new file mode 100644 index 00000000..08fd9002 --- /dev/null +++ b/node_modules/osenv/README.md @@ -0,0 +1,63 @@ +# osenv + +Look up environment settings specific to different operating systems. + +## Usage + +```javascript +var osenv = require('osenv') +var path = osenv.path() +var user = osenv.user() +// etc. + +// Some things are not reliably in the env, and have a fallback command: +var h = osenv.hostname(function (er, hostname) { + h = hostname +}) +// This will still cause it to be memoized, so calling osenv.hostname() +// is now an immediate operation. + +// You can always send a cb, which will get called in the nextTick +// if it's been memoized, or wait for the fallback data if it wasn't +// found in the environment. +osenv.hostname(function (er, hostname) { + if (er) console.error('error looking up hostname') + else console.log('this machine calls itself %s', hostname) +}) +``` + +## osenv.hostname() + +The machine name. Calls `hostname` if not found. + +## osenv.user() + +The currently logged-in user. Calls `whoami` if not found. + +## osenv.prompt() + +Either PS1 on unix, or PROMPT on Windows. + +## osenv.tmpdir() + +The place where temporary files should be created. + +## osenv.home() + +No place like it. + +## osenv.path() + +An array of the places that the operating system will search for +executables. + +## osenv.editor() + +Return the executable name of the editor program. This uses the EDITOR +and VISUAL environment variables, and falls back to `vi` on Unix, or +`notepad.exe` on Windows. + +## osenv.shell() + +The SHELL on Unix, which Windows calls the ComSpec. Defaults to 'bash' +or 'cmd'. diff --git a/node_modules/osenv/osenv.js b/node_modules/osenv/osenv.js new file mode 100644 index 00000000..702a95b9 --- /dev/null +++ b/node_modules/osenv/osenv.js @@ -0,0 +1,72 @@ +var isWindows = process.platform === 'win32' +var path = require('path') +var exec = require('child_process').exec +var osTmpdir = require('os-tmpdir') +var osHomedir = require('os-homedir') + +// looking up envs is a bit costly. +// Also, sometimes we want to have a fallback +// Pass in a callback to wait for the fallback on failures +// After the first lookup, always returns the same thing. 
+function memo (key, lookup, fallback) { + var fell = false + var falling = false + exports[key] = function (cb) { + var val = lookup() + if (!val && !fell && !falling && fallback) { + fell = true + falling = true + exec(fallback, function (er, output, stderr) { + falling = false + if (er) return // oh well, we tried + val = output.trim() + }) + } + exports[key] = function (cb) { + if (cb) process.nextTick(cb.bind(null, null, val)) + return val + } + if (cb && !falling) process.nextTick(cb.bind(null, null, val)) + return val + } +} + +memo('user', function () { + return ( isWindows + ? process.env.USERDOMAIN + '\\' + process.env.USERNAME + : process.env.USER + ) +}, 'whoami') + +memo('prompt', function () { + return isWindows ? process.env.PROMPT : process.env.PS1 +}) + +memo('hostname', function () { + return isWindows ? process.env.COMPUTERNAME : process.env.HOSTNAME +}, 'hostname') + +memo('tmpdir', function () { + return osTmpdir() +}) + +memo('home', function () { + return osHomedir() +}) + +memo('path', function () { + return (process.env.PATH || + process.env.Path || + process.env.path).split(isWindows ? ';' : ':') +}) + +memo('editor', function () { + return process.env.EDITOR || + process.env.VISUAL || + (isWindows ? 'notepad.exe' : 'vi') +}) + +memo('shell', function () { + return isWindows ? process.env.ComSpec || 'cmd' + : process.env.SHELL || 'bash' +}) diff --git a/node_modules/osenv/package.json b/node_modules/osenv/package.json new file mode 100644 index 00000000..90898f12 --- /dev/null +++ b/node_modules/osenv/package.json @@ -0,0 +1,37 @@ +{ + "name": "osenv", + "version": "0.1.5", + "main": "osenv.js", + "directories": { + "test": "test" + }, + "dependencies": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.0" + }, + "devDependencies": { + "tap": "^11.1.0" + }, + "scripts": { + "test": "tap test/*.js", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": "https://github.com/npm/osenv", + "keywords": [ + "environment", + "variable", + "home", + "tmpdir", + "path", + "prompt", + "ps1" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "description": "Look up environment settings specific to different operating systems", + "files": [ + "osenv.js" + ] +} diff --git a/node_modules/p-finally/index.js b/node_modules/p-finally/index.js new file mode 100644 index 00000000..52b7b49c --- /dev/null +++ b/node_modules/p-finally/index.js @@ -0,0 +1,15 @@ +'use strict'; +module.exports = (promise, onFinally) => { + onFinally = onFinally || (() => {}); + + return promise.then( + val => new Promise(resolve => { + resolve(onFinally()); + }).then(() => val), + err => new Promise(resolve => { + resolve(onFinally()); + }).then(() => { + throw err; + }) + ); +}; diff --git a/node_modules/p-finally/license b/node_modules/p-finally/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/p-finally/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/p-finally/package.json b/node_modules/p-finally/package.json new file mode 100644 index 00000000..b26ab518 --- /dev/null +++ b/node_modules/p-finally/package.json @@ -0,0 +1,42 @@ +{ + "name": "p-finally", + "version": "1.0.0", + "description": "`Promise#finally()` ponyfill - Invoked when the promise is settled regardless of outcome", + "license": "MIT", + "repository": "sindresorhus/p-finally", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "promise", + "finally", + "handler", + "function", + "async", + "await", + "promises", + "settled", + "ponyfill", + "polyfill", + "shim", + "bluebird" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + }, + "xo": { + "esnext": true + } +} diff --git a/node_modules/p-finally/readme.md b/node_modules/p-finally/readme.md new file mode 100644 index 00000000..09ef3641 --- /dev/null +++ b/node_modules/p-finally/readme.md @@ -0,0 +1,47 @@ +# p-finally [![Build Status](https://travis-ci.org/sindresorhus/p-finally.svg?branch=master)](https://travis-ci.org/sindresorhus/p-finally) + +> [`Promise#finally()`](https://github.com/tc39/proposal-promise-finally) [ponyfill](https://ponyfill.com) - Invoked when the promise is settled regardless of outcome + +Useful for cleanup. 
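A minimal sketch of the behaviour the implementation above guarantees: `onFinally` runs whether the promise fulfills or rejects, and the original value or rejection reason is passed through unchanged.

```js
const pFinally = require('p-finally');

// Fulfilled path: cleanup runs, then the original value is forwarded.
pFinally(Promise.resolve(42), () => console.log('cleanup'))
  .then(value => console.log(value)); // logs 'cleanup', then 42

// Rejected path: cleanup still runs, then the original error is re-thrown.
pFinally(Promise.reject(new Error('boom')), () => console.log('cleanup'))
  .catch(err => console.log(err.message)); // logs 'cleanup', then 'boom'
```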
+ + +## Install + +``` +$ npm install --save p-finally +``` + + +## Usage + +```js +const pFinally = require('p-finally'); + +const dir = createTempDir(); + +pFinally(write(dir), () => cleanup(dir)); +``` + + +## API + +### pFinally(promise, [onFinally]) + +Returns a `Promise`. + +#### onFinally + +Type: `Function` + +Note: Throwing or returning a rejected promise will reject `promise` with the rejection reason. + + +## Related + +- [p-try](https://github.com/sindresorhus/p-try) - `Promise#try()` ponyfill - Starts a promise chain +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/package-json/index.js b/node_modules/package-json/index.js new file mode 100644 index 00000000..1e588b27 --- /dev/null +++ b/node_modules/package-json/index.js @@ -0,0 +1,67 @@ +'use strict'; +const url = require('url'); +const got = require('got'); +const registryUrl = require('registry-url'); +const registryAuthToken = require('registry-auth-token'); +const semver = require('semver'); + +module.exports = (name, opts) => { + const scope = name.split('/')[0]; + const regUrl = registryUrl(scope); + const pkgUrl = url.resolve(regUrl, encodeURIComponent(name).replace(/^%40/, '@')); + const authInfo = registryAuthToken(regUrl, {recursive: true}); + + opts = Object.assign({ + version: 'latest' + }, opts); + + const headers = { + accept: 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' + }; + + if (opts.fullMetadata) { + delete headers.accept; + } + + if (authInfo) { + headers.authorization = `${authInfo.type} ${authInfo.token}`; + } + + return got(pkgUrl, {json: true, headers}) + .then(res => { + let data = res.body; + let version = opts.version; + + if (opts.allVersions) { + return data; + } + + if (data['dist-tags'][version]) { + data = data.versions[data['dist-tags'][version]]; + } else if (version) { + if (!data.versions[version]) { + const versions = Object.keys(data.versions); + version = semver.maxSatisfying(versions, version); + + if (!version) { + throw new Error('Version doesn\'t exist'); + } + } + + data = data.versions[version]; + + if (!data) { + throw new Error('Version doesn\'t exist'); + } + } + + return data; + }) + .catch(err => { + if (err.statusCode === 404) { + throw new Error(`Package \`${name}\` doesn't exist`); + } + + throw err; + }); +}; diff --git a/node_modules/package-json/license b/node_modules/package-json/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/package-json/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/package-json/node_modules/.bin/semver b/node_modules/package-json/node_modules/.bin/semver new file mode 120000 index 00000000..b3ca6032 --- /dev/null +++ b/node_modules/package-json/node_modules/.bin/semver @@ -0,0 +1 @@ +../../../semver/bin/semver \ No newline at end of file diff --git a/node_modules/package-json/package.json b/node_modules/package-json/package.json new file mode 100644 index 00000000..f00c245b --- /dev/null +++ b/node_modules/package-json/package.json @@ -0,0 +1,43 @@ +{ + "name": "package-json", + "version": "4.0.1", + "description": "Get metadata of a package from the npm registry", + "license": "MIT", + "repository": "sindresorhus/package-json", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "npm", + "registry", + "package", + "pkg", + "package.json", + "json", + "module", + "scope", + "scoped" + ], + "dependencies": { + "got": "^6.7.1", + "registry-auth-token": "^3.0.1", + "registry-url": "^3.0.3", + "semver": "^5.1.0" + }, + "devDependencies": { + "ava": "*", + "mock-private-registry": "^1.1.0", + "xo": "*" + } +} diff --git a/node_modules/package-json/readme.md b/node_modules/package-json/readme.md new file mode 100644 index 00000000..477e25c2 --- /dev/null +++ b/node_modules/package-json/readme.md @@ -0,0 +1,91 @@ +# package-json [![Build Status](https://travis-ci.org/sindresorhus/package-json.svg?branch=master)](https://travis-ci.org/sindresorhus/package-json) + +> Get metadata of a package from the npm registry + + +## Install + +``` +$ npm install --save package-json +``` + + +## Usage + +```js +const packageJson = require('package-json'); + +packageJson('ava').then(json => { + console.log(json); + //=> {name: 'ava', ...} +}); + +// Also works with scoped packages +packageJson('@sindresorhus/df').then(json => { + console.log(json); + //=> {name: '@sindresorhus/df', ...} +}); +``` + + +## API + +### packageJson(name, [options]) + +#### name + +Type: `string` + +Name of the package. + +#### options + +Type: `Object` + +##### version + +Type: `string`
+Default: `latest` + +Package version such as `1.0.0` or a [dist tag](https://docs.npmjs.com/cli/dist-tag) such as `latest`. + +The version can also be in any format supported by the [semver](https://github.com/npm/node-semver) module. For example: + +- `1` - get the latest `1.x.x` +- `1.2` - get the latest `1.2.x` +- `^1.2.3` - get the latest `1.x.x` but at least `1.2.3` +- `~1.2.3` - get the latest `1.2.x` but at least `1.2.3` + +##### fullMetadata + +Type: `boolean`
+Default: `false` + +By default, only an abbreviated metadata object is returned for performance reasons. [Read more.](https://github.com/npm/registry/blob/master/docs/responses/package-metadata.md) + +##### allVersions + +Type: `boolean`
+Default: `false` + +Return the [main entry](https://registry.npmjs.org/ava) containing all versions. + + +## Authentication + +Both public and private registries are supported, for both scoped and unscoped packages, as long as the registry uses either bearer tokens or basic authentication. + + +## Related + +- [package-json-cli](https://github.com/sindresorhus/package-json-cli) - CLI for this module +- [latest-version](https://github.com/sindresorhus/latest-version) - Get the latest version of an npm package +- [pkg-versions](https://github.com/sindresorhus/pkg-versions) - Get the version numbers of a package from the npm registry +- [npm-keyword](https://github.com/sindresorhus/npm-keyword) - Get a list of npm packages with a certain keyword +- [npm-user](https://github.com/sindresorhus/npm-user) - Get user info of an npm user +- [npm-email](https://github.com/sindresorhus/npm-email) - Get the email of an npm user + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/parseurl/HISTORY.md b/node_modules/parseurl/HISTORY.md new file mode 100644 index 00000000..8e409541 --- /dev/null +++ b/node_modules/parseurl/HISTORY.md @@ -0,0 +1,58 @@ +1.3.3 / 2019-04-15 +================== + + * Fix Node.js 0.8 return value inconsistencies + +1.3.2 / 2017-09-09 +================== + + * perf: reduce overhead for full URLs + * perf: unroll the "fast-path" `RegExp` + +1.3.1 / 2016-01-17 +================== + + * perf: enable strict mode + +1.3.0 / 2014-08-09 +================== + + * Add `parseurl.original` for parsing `req.originalUrl` with fallback + * Return `undefined` if `req.url` is `undefined` + +1.2.0 / 2014-07-21 +================== + + * Cache URLs based on original value + * Remove no-longer-needed URL mis-parse work-around + * Simplify the "fast-path" `RegExp` + +1.1.3 / 2014-07-08 +================== + + * Fix typo + +1.1.2 / 2014-07-08 +================== + + * Seriously fix Node.js 0.8 compatibility + +1.1.1 / 2014-07-08 +================== + + * Fix Node.js 0.8 compatibility + +1.1.0 / 2014-07-08 +================== + + * Incorporate URL href-only parse fast-path + +1.0.1 / 2014-03-08 +================== + + * Add missing `require` + +1.0.0 / 2014-03-08 +================== + + * Genesis from `connect` diff --git a/node_modules/parseurl/LICENSE b/node_modules/parseurl/LICENSE new file mode 100644 index 00000000..27653d3d --- /dev/null +++ b/node_modules/parseurl/LICENSE @@ -0,0 +1,24 @@ + +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014-2017 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/parseurl/README.md b/node_modules/parseurl/README.md new file mode 100644 index 00000000..443e716b --- /dev/null +++ b/node_modules/parseurl/README.md @@ -0,0 +1,133 @@ +# parseurl + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Parse a URL with memoization. + +## Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install parseurl +``` + +## API + +```js +var parseurl = require('parseurl') +``` + +### parseurl(req) + +Parse the URL of the given request object (looks at the `req.url` property) +and return the result. The result is the same as `url.parse` in Node.js core. +Calling this function multiple times on the same `req` where `req.url` does +not change will return a cached parsed object, rather than parsing again. + +### parseurl.original(req) + +Parse the original URL of the given request object and return the result. +This works by trying to parse `req.originalUrl` if it is a string, otherwise +parses `req.url`. The result is the same as `url.parse` in Node.js core. +Calling this function multiple times on the same `req` where `req.originalUrl` +does not change will return a cached parsed object, rather than parsing again. + +## Benchmark + +```bash +$ npm run-script bench + +> parseurl@1.3.3 bench nodejs-parseurl +> node benchmark/index.js + + http_parser@2.8.0 + node@10.6.0 + v8@6.7.288.46-node.13 + uv@1.21.0 + zlib@1.2.11 + ares@1.14.0 + modules@64 + nghttp2@1.32.0 + napi@3 + openssl@1.1.0h + icu@61.1 + unicode@10.0 + cldr@33.0 + tz@2018c + +> node benchmark/fullurl.js + + Parsing URL "http://localhost:8888/foo/bar?user=tj&pet=fluffy" + + 4 tests completed. + + fasturl x 2,207,842 ops/sec ±3.76% (184 runs sampled) + nativeurl - legacy x 507,180 ops/sec ±0.82% (191 runs sampled) + nativeurl - whatwg x 290,044 ops/sec ±1.96% (189 runs sampled) + parseurl x 488,907 ops/sec ±2.13% (192 runs sampled) + +> node benchmark/pathquery.js + + Parsing URL "/foo/bar?user=tj&pet=fluffy" + + 4 tests completed. + + fasturl x 3,812,564 ops/sec ±3.15% (188 runs sampled) + nativeurl - legacy x 2,651,631 ops/sec ±1.68% (189 runs sampled) + nativeurl - whatwg x 161,837 ops/sec ±2.26% (189 runs sampled) + parseurl x 4,166,338 ops/sec ±2.23% (184 runs sampled) + +> node benchmark/samerequest.js + + Parsing URL "/foo/bar?user=tj&pet=fluffy" on same request object + + 4 tests completed. + + fasturl x 3,821,651 ops/sec ±2.42% (185 runs sampled) + nativeurl - legacy x 2,651,162 ops/sec ±1.90% (187 runs sampled) + nativeurl - whatwg x 175,166 ops/sec ±1.44% (188 runs sampled) + parseurl x 14,912,606 ops/sec ±3.59% (183 runs sampled) + +> node benchmark/simplepath.js + + Parsing URL "/foo/bar" + + 4 tests completed. 
+ + fasturl x 12,421,765 ops/sec ±2.04% (191 runs sampled) + nativeurl - legacy x 7,546,036 ops/sec ±1.41% (188 runs sampled) + nativeurl - whatwg x 198,843 ops/sec ±1.83% (189 runs sampled) + parseurl x 24,244,006 ops/sec ±0.51% (194 runs sampled) + +> node benchmark/slash.js + + Parsing URL "/" + + 4 tests completed. + + fasturl x 17,159,456 ops/sec ±3.25% (188 runs sampled) + nativeurl - legacy x 11,635,097 ops/sec ±3.79% (184 runs sampled) + nativeurl - whatwg x 240,693 ops/sec ±0.83% (189 runs sampled) + parseurl x 42,279,067 ops/sec ±0.55% (190 runs sampled) +``` + +## License + + [MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/pillarjs/parseurl/master +[coveralls-url]: https://coveralls.io/r/pillarjs/parseurl?branch=master +[node-image]: https://badgen.net/npm/node/parseurl +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/parseurl +[npm-url]: https://npmjs.org/package/parseurl +[npm-version-image]: https://badgen.net/npm/v/parseurl +[travis-image]: https://badgen.net/travis/pillarjs/parseurl/master +[travis-url]: https://travis-ci.org/pillarjs/parseurl diff --git a/node_modules/parseurl/index.js b/node_modules/parseurl/index.js new file mode 100644 index 00000000..ece72232 --- /dev/null +++ b/node_modules/parseurl/index.js @@ -0,0 +1,158 @@ +/*! + * parseurl + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2017 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var url = require('url') +var parse = url.parse +var Url = url.Url + +/** + * Module exports. + * @public + */ + +module.exports = parseurl +module.exports.original = originalurl + +/** + * Parse the `req` url with memoization. + * + * @param {ServerRequest} req + * @return {Object} + * @public + */ + +function parseurl (req) { + var url = req.url + + if (url === undefined) { + // URL is undefined + return undefined + } + + var parsed = req._parsedUrl + + if (fresh(url, parsed)) { + // Return cached URL parse + return parsed + } + + // Parse the URL + parsed = fastparse(url) + parsed._raw = url + + return (req._parsedUrl = parsed) +}; + +/** + * Parse the `req` original url with fallback and memoization. + * + * @param {ServerRequest} req + * @return {Object} + * @public + */ + +function originalurl (req) { + var url = req.originalUrl + + if (typeof url !== 'string') { + // Fallback + return parseurl(req) + } + + var parsed = req._parsedOriginalUrl + + if (fresh(url, parsed)) { + // Return cached URL parse + return parsed + } + + // Parse the URL + parsed = fastparse(url) + parsed._raw = url + + return (req._parsedOriginalUrl = parsed) +}; + +/** + * Parse the `str` url with fast-path short-cut. + * + * @param {string} str + * @return {Object} + * @private + */ + +function fastparse (str) { + if (typeof str !== 'string' || str.charCodeAt(0) !== 0x2f /* / */) { + return parse(str) + } + + var pathname = str + var query = null + var search = null + + // This takes the regexp from https://github.com/joyent/node/pull/7878 + // Which is /^(\/[^?#\s]*)(\?[^#\s]*)?$/ + // And unrolls it into a for loop + for (var i = 1; i < str.length; i++) { + switch (str.charCodeAt(i)) { + case 0x3f: /* ? 
*/ + if (search === null) { + pathname = str.substring(0, i) + query = str.substring(i + 1) + search = str.substring(i) + } + break + case 0x09: /* \t */ + case 0x0a: /* \n */ + case 0x0c: /* \f */ + case 0x0d: /* \r */ + case 0x20: /* */ + case 0x23: /* # */ + case 0xa0: + case 0xfeff: + return parse(str) + } + } + + var url = Url !== undefined + ? new Url() + : {} + + url.path = str + url.href = str + url.pathname = pathname + + if (search !== null) { + url.query = query + url.search = search + } + + return url +} + +/** + * Determine if parsed is still fresh for url. + * + * @param {string} url + * @param {object} parsedUrl + * @return {boolean} + * @private + */ + +function fresh (url, parsedUrl) { + return typeof parsedUrl === 'object' && + parsedUrl !== null && + (Url === undefined || parsedUrl instanceof Url) && + parsedUrl._raw === url +} diff --git a/node_modules/parseurl/package.json b/node_modules/parseurl/package.json new file mode 100644 index 00000000..6b443ca7 --- /dev/null +++ b/node_modules/parseurl/package.json @@ -0,0 +1,40 @@ +{ + "name": "parseurl", + "description": "parse a url with memoization", + "version": "1.3.3", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "repository": "pillarjs/parseurl", + "license": "MIT", + "devDependencies": { + "beautify-benchmark": "0.2.4", + "benchmark": "2.1.4", + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.17.1", + "eslint-plugin-node": "7.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "fast-url-parser": "1.1.3", + "istanbul": "0.4.5", + "mocha": "6.1.3" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "bench": "node benchmark/index.js", + "lint": "eslint .", + "test": "mocha --check-leaks --bail --reporter spec test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --check-leaks --reporter dot test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --check-leaks --reporter spec test/" + } +} diff --git a/node_modules/pascalcase/LICENSE b/node_modules/pascalcase/LICENSE new file mode 100644 index 00000000..65f90aca --- /dev/null +++ b/node_modules/pascalcase/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
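The parseurl module added above memoizes its result on the request object (`req._parsedUrl`), so repeated calls with an unchanged `req.url` return the same parsed object. A minimal sketch, assuming an arbitrary handler and port (illustrative only, not part of the vendored files):

```js
const http = require('http');
const parseurl = require('parseurl');

http.createServer((req, res) => {
  const first = parseurl(req);  // parses req.url and caches the result on req
  const second = parseurl(req); // req.url is unchanged, so the cached object is returned
  res.end(JSON.stringify({
    pathname: first.pathname,
    cached: first === second    // true: both calls return the same object
  }));
}).listen(3000);
```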
diff --git a/node_modules/pascalcase/README.md b/node_modules/pascalcase/README.md new file mode 100644 index 00000000..fa3fd00f --- /dev/null +++ b/node_modules/pascalcase/README.md @@ -0,0 +1,80 @@ +# pascalcase [![NPM version](https://badge.fury.io/js/pascalcase.svg)](http://badge.fury.io/js/pascalcase) + +> Convert a string to pascal-case. + +## Install + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i pascalcase --save +``` + +## Usage + +```js +var pascalcase = require('pascalcase'); + +pascalcase('a'); +//=> 'A' + +pascalcase('foo bar baz'); +//=> 'FooBarBaz' + +pascalcase('foo_bar-baz'); +//=> 'FooBarBaz' + +pascalcase('foo.bar.baz'); +//=> 'FooBarBaz' + +pascalcase('foo/bar/baz'); +//=> 'FooBarBaz' + +pascalcase('foo[bar)baz'); +//=> 'FooBarBaz' + +pascalcase('#foo+bar*baz'); +//=> 'FooBarBaz' + +pascalcase('$foo~bar`baz'); +//=> 'FooBarBaz' + +pascalcase('_foo_bar-baz-'); +//=> 'FooBarBaz' +``` + +## Related projects + +* [justified](https://github.com/jonschlinkert/justified): Wrap words to a specified length and justified the text. +* [pad-left](https://github.com/jonschlinkert/pad-left): Left pad a string with zeros or a specified string. Fastest implementation. +* [pad-right](https://github.com/jonschlinkert/pad-right): Right pad a string with zeros or a specified string. Fastest implementation. +* [repeat-string](https://github.com/jonschlinkert/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. +* [word-wrap](https://github.com/jonschlinkert/word-wrap): Wrap words to a specified length. + +## Running tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/pascalcase/issues/new) + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on August 19, 2015._ \ No newline at end of file diff --git a/node_modules/pascalcase/index.js b/node_modules/pascalcase/index.js new file mode 100644 index 00000000..7e8159ce --- /dev/null +++ b/node_modules/pascalcase/index.js @@ -0,0 +1,21 @@ +/*! + * pascalcase + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. 
+ */ + +function pascalcase(str) { + if (typeof str !== 'string') { + throw new TypeError('expected a string.'); + } + str = str.replace(/([A-Z])/g, ' $1'); + if (str.length === 1) { return str.toUpperCase(); } + str = str.replace(/^[\W_]+|[\W_]+$/g, '').toLowerCase(); + str = str.charAt(0).toUpperCase() + str.slice(1); + return str.replace(/[\W_]+(\w|$)/g, function (_, ch) { + return ch.toUpperCase(); + }); +} + +module.exports = pascalcase; diff --git a/node_modules/pascalcase/package.json b/node_modules/pascalcase/package.json new file mode 100644 index 00000000..0576d139 --- /dev/null +++ b/node_modules/pascalcase/package.json @@ -0,0 +1,46 @@ +{ + "name": "pascalcase", + "description": "Convert a string to pascal-case.", + "version": "0.1.1", + "homepage": "https://github.com/jonschlinkert/pascalcase", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/pascalcase", + "bugs": { + "url": "https://github.com/jonschlinkert/pascalcase/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "keywords": [ + "camelcase", + "case", + "casing", + "pascal", + "pascal-case", + "pascalcase", + "string" + ], + "verb": { + "related": { + "list": [ + "pad-left", + "pad-right", + "word-wrap", + "repeat-string", + "justified" + ] + } + } +} \ No newline at end of file diff --git a/node_modules/path-dirname/index.js b/node_modules/path-dirname/index.js new file mode 100644 index 00000000..ed67817a --- /dev/null +++ b/node_modules/path-dirname/index.js @@ -0,0 +1,143 @@ +'use strict'; + +var path = require('path'); +var inspect = require('util').inspect; + +function assertPath(path) { + if (typeof path !== 'string') { + throw new TypeError('Path must be a string. Received ' + inspect(path)); + } +} + +function posix(path) { + assertPath(path); + if (path.length === 0) + return '.'; + var code = path.charCodeAt(0); + var hasRoot = (code === 47/*/*/); + var end = -1; + var matchedSlash = true; + for (var i = path.length - 1; i >= 1; --i) { + code = path.charCodeAt(i); + if (code === 47/*/*/) { + if (!matchedSlash) { + end = i; + break; + } + } else { + // We saw the first non-path separator + matchedSlash = false; + } + } + + if (end === -1) + return hasRoot ? '/' : '.'; + if (hasRoot && end === 1) + return '//'; + return path.slice(0, end); +} + +function win32(path) { + assertPath(path); + var len = path.length; + if (len === 0) + return '.'; + var rootEnd = -1; + var end = -1; + var matchedSlash = true; + var offset = 0; + var code = path.charCodeAt(0); + + // Try to match a root + if (len > 1) { + if (code === 47/*/*/ || code === 92/*\*/) { + // Possible UNC root + + rootEnd = offset = 1; + + code = path.charCodeAt(1); + if (code === 47/*/*/ || code === 92/*\*/) { + // Matched double path separator at beginning + var j = 2; + var last = j; + // Match 1 or more non-path separators + for (; j < len; ++j) { + code = path.charCodeAt(j); + if (code === 47/*/*/ || code === 92/*\*/) + break; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more path separators + for (; j < len; ++j) { + code = path.charCodeAt(j); + if (code !== 47/*/*/ && code !== 92/*\*/) + break; + } + if (j < len && j !== last) { + // Matched! 
+ last = j; + // Match 1 or more non-path separators + for (; j < len; ++j) { + code = path.charCodeAt(j); + if (code === 47/*/*/ || code === 92/*\*/) + break; + } + if (j === len) { + // We matched a UNC root only + return path; + } + if (j !== last) { + // We matched a UNC root with leftovers + + // Offset by 1 to include the separator after the UNC root to + // treat it as a "normal root" on top of a (UNC) root + rootEnd = offset = j + 1; + } + } + } + } + } else if ((code >= 65/*A*/ && code <= 90/*Z*/) || + (code >= 97/*a*/ && code <= 122/*z*/)) { + // Possible device root + + code = path.charCodeAt(1); + if (path.charCodeAt(1) === 58/*:*/) { + rootEnd = offset = 2; + if (len > 2) { + code = path.charCodeAt(2); + if (code === 47/*/*/ || code === 92/*\*/) + rootEnd = offset = 3; + } + } + } + } else if (code === 47/*/*/ || code === 92/*\*/) { + return path[0]; + } + + for (var i = len - 1; i >= offset; --i) { + code = path.charCodeAt(i); + if (code === 47/*/*/ || code === 92/*\*/) { + if (!matchedSlash) { + end = i; + break; + } + } else { + // We saw the first non-path separator + matchedSlash = false; + } + } + + if (end === -1) { + if (rootEnd === -1) + return '.'; + else + end = rootEnd; + } + return path.slice(0, end); +} + +module.exports = process.platform === 'win32' ? win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; diff --git a/node_modules/path-dirname/license b/node_modules/path-dirname/license new file mode 100644 index 00000000..1981663a --- /dev/null +++ b/node_modules/path-dirname/license @@ -0,0 +1,22 @@ + +The MIT License (MIT) + +Copyright (c) Elan Shanker and Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
diff --git a/node_modules/path-dirname/package.json b/node_modules/path-dirname/package.json new file mode 100644 index 00000000..eb78533b --- /dev/null +++ b/node_modules/path-dirname/package.json @@ -0,0 +1,29 @@ +{ + "name": "path-dirname", + "version": "1.0.2", + "description": "Node.js path.dirname() ponyfill", + "license": "MIT", + "repository": "es128/path-dirname", + "author": "Elan Shanker", + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "dirname", + "dir", + "path", + "paths", + "file", + "built-in", + "util", + "utils", + "core", + "stdlib", + "ponyfill", + "polyfill", + "shim" + ] +} diff --git a/node_modules/path-dirname/readme.md b/node_modules/path-dirname/readme.md new file mode 100644 index 00000000..652a5623 --- /dev/null +++ b/node_modules/path-dirname/readme.md @@ -0,0 +1,53 @@ +# path-dirname [![Build Status](https://travis-ci.org/es128/path-dirname.svg?branch=master)](https://travis-ci.org/es128/path-dirname) + +> Node.js [`path.dirname()`](https://nodejs.org/api/path.html#path_path_dirname_path) [ponyfill](https://ponyfill.com) + +This was needed in order to expose `path.posix.dirname()` on Node.js v0.10 + +## Install + +``` +$ npm install --save path-dirname +``` + + +## Usage + +```js +const pathDirname = require('path-dirname'); + +pathDirname('/home/foo'); +//=> '/home' +pathDirname('C:\\Users\\foo'); +//=> 'C:\\Users' +pathDirname('foo'); +//=> '.' +pathDirname('foo/bar'); +//=> 'foo' + +//Using posix version for consistent output when dealing with glob escape chars +pathDirname.win32('C:\\Users\\foo/\\*bar'); +//=> 'C:\\Users\\foo/' +pathDirname.posix('C:\\Users\\foo/\\*bar'); +//=> 'C:\\Users\\foo' +``` + + +## API + +See the [`path.dirname()` docs](https://nodejs.org/api/path.html#path_path_dirname_path). + +### pathDirname(path) + +### pathDirname.posix(path) + +POSIX specific version. + +### pathDirname.win32(path) + +Windows specific version. + + +## License + +MIT diff --git a/node_modules/path-is-absolute/index.js b/node_modules/path-is-absolute/index.js new file mode 100644 index 00000000..22aa6c35 --- /dev/null +++ b/node_modules/path-is-absolute/index.js @@ -0,0 +1,20 @@ +'use strict'; + +function posix(path) { + return path.charAt(0) === '/'; +} + +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); + + // UNC paths are always absolute + return Boolean(result[2] || isUnc); +} + +module.exports = process.platform === 'win32' ? 
win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; diff --git a/node_modules/path-is-absolute/license b/node_modules/path-is-absolute/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/path-is-absolute/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/path-is-absolute/package.json b/node_modules/path-is-absolute/package.json new file mode 100644 index 00000000..91196d5e --- /dev/null +++ b/node_modules/path-is-absolute/package.json @@ -0,0 +1,43 @@ +{ + "name": "path-is-absolute", + "version": "1.0.1", + "description": "Node.js 0.12 path.isAbsolute() ponyfill", + "license": "MIT", + "repository": "sindresorhus/path-is-absolute", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "path", + "paths", + "file", + "dir", + "absolute", + "isabsolute", + "is-absolute", + "built-in", + "util", + "utils", + "core", + "ponyfill", + "polyfill", + "shim", + "is", + "detect", + "check" + ], + "devDependencies": { + "xo": "^0.16.0" + } +} diff --git a/node_modules/path-is-absolute/readme.md b/node_modules/path-is-absolute/readme.md new file mode 100644 index 00000000..8dbdf5fc --- /dev/null +++ b/node_modules/path-is-absolute/readme.md @@ -0,0 +1,59 @@ +# path-is-absolute [![Build Status](https://travis-ci.org/sindresorhus/path-is-absolute.svg?branch=master)](https://travis-ci.org/sindresorhus/path-is-absolute) + +> Node.js 0.12 [`path.isAbsolute()`](http://nodejs.org/api/path.html#path_path_isabsolute_path) [ponyfill](https://ponyfill.com) + + +## Install + +``` +$ npm install --save path-is-absolute +``` + + +## Usage + +```js +const pathIsAbsolute = require('path-is-absolute'); + +// Running on Linux +pathIsAbsolute('/home/foo'); +//=> true +pathIsAbsolute('C:/Users/foo'); +//=> false + +// Running on Windows +pathIsAbsolute('C:/Users/foo'); +//=> true +pathIsAbsolute('/home/foo'); +//=> false + +// Running on any OS +pathIsAbsolute.posix('/home/foo'); +//=> true +pathIsAbsolute.posix('C:/Users/foo'); +//=> false +pathIsAbsolute.win32('C:/Users/foo'); +//=> true +pathIsAbsolute.win32('/home/foo'); +//=> false +``` + + +## API + +See the [`path.isAbsolute()` docs](http://nodejs.org/api/path.html#path_path_isabsolute_path). 
+ +### pathIsAbsolute(path) + +### pathIsAbsolute.posix(path) + +POSIX specific version. + +### pathIsAbsolute.win32(path) + +Windows specific version. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/path-is-inside/LICENSE.txt b/node_modules/path-is-inside/LICENSE.txt new file mode 100644 index 00000000..0bdbb61c --- /dev/null +++ b/node_modules/path-is-inside/LICENSE.txt @@ -0,0 +1,47 @@ +Dual licensed under WTFPL and MIT: + +--- + +Copyright © 2013–2016 Domenic Denicola + +This work is free. You can redistribute it and/or modify it under the +terms of the Do What The Fuck You Want To Public License, Version 2, +as published by Sam Hocevar. See below for more details. + + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + Version 2, December 2004 + + Copyright (C) 2004 Sam Hocevar + + Everyone is permitted to copy and distribute verbatim or modified + copies of this license document, and changing it is allowed as long + as the name is changed. + + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. You just DO WHAT THE FUCK YOU WANT TO. + +--- + +The MIT License (MIT) + +Copyright © 2013–2016 Domenic Denicola + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/path-is-inside/lib/path-is-inside.js b/node_modules/path-is-inside/lib/path-is-inside.js new file mode 100644 index 00000000..596dfd3b --- /dev/null +++ b/node_modules/path-is-inside/lib/path-is-inside.js @@ -0,0 +1,28 @@ +"use strict"; + +var path = require("path"); + +module.exports = function (thePath, potentialParent) { + // For inside-directory checking, we want to allow trailing slashes, so normalize. + thePath = stripTrailingSep(thePath); + potentialParent = stripTrailingSep(potentialParent); + + // Node treats only Windows as case-insensitive in its path module; we follow those conventions. 
+ if (process.platform === "win32") { + thePath = thePath.toLowerCase(); + potentialParent = potentialParent.toLowerCase(); + } + + return thePath.lastIndexOf(potentialParent, 0) === 0 && + ( + thePath[potentialParent.length] === path.sep || + thePath[potentialParent.length] === undefined + ); +}; + +function stripTrailingSep(thePath) { + if (thePath[thePath.length - 1] === path.sep) { + return thePath.slice(0, -1); + } + return thePath; +} diff --git a/node_modules/path-is-inside/package.json b/node_modules/path-is-inside/package.json new file mode 100644 index 00000000..74c56e69 --- /dev/null +++ b/node_modules/path-is-inside/package.json @@ -0,0 +1,21 @@ +{ + "name": "path-is-inside", + "description": "Tests whether one path is inside another path", + "keywords": ["path", "directory", "folder", "inside", "relative"], + "version": "1.0.2", + "author": "Domenic Denicola (https://domenic.me)", + "license": "(WTFPL OR MIT)", + "repository": "domenic/path-is-inside", + "main": "lib/path-is-inside.js", + "files": [ + "lib" + ], + "scripts": { + "test": "mocha", + "lint": "jshint lib" + }, + "devDependencies": { + "jshint": "~2.3.0", + "mocha": "~1.15.1" + } +} diff --git a/node_modules/path-key/index.js b/node_modules/path-key/index.js new file mode 100644 index 00000000..62c8250a --- /dev/null +++ b/node_modules/path-key/index.js @@ -0,0 +1,13 @@ +'use strict'; +module.exports = opts => { + opts = opts || {}; + + const env = opts.env || process.env; + const platform = opts.platform || process.platform; + + if (platform !== 'win32') { + return 'PATH'; + } + + return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; +}; diff --git a/node_modules/path-key/license b/node_modules/path-key/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/path-key/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
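The path-is-inside module vendored above ships without a readme in this patch, so here is a minimal usage sketch of its single export, based on `lib/path-is-inside.js` (illustrative only; example POSIX paths):

```js
const pathIsInside = require('path-is-inside');

pathIsInside('/home/app/src/index.js', '/home/app'); //=> true
pathIsInside('/home/app/', '/home/app');             //=> true (trailing separators are stripped)
pathIsInside('/home/other/file.js', '/home/app');    //=> false

// On Windows the comparison is case-insensitive, matching Node's own path conventions.
```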
diff --git a/node_modules/path-key/package.json b/node_modules/path-key/package.json new file mode 100644 index 00000000..81e0e8be --- /dev/null +++ b/node_modules/path-key/package.json @@ -0,0 +1,39 @@ +{ + "name": "path-key", + "version": "2.0.1", + "description": "Get the PATH environment variable key cross-platform", + "license": "MIT", + "repository": "sindresorhus/path-key", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "path", + "key", + "environment", + "env", + "variable", + "var", + "get", + "cross-platform", + "windows" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + }, + "xo": { + "esnext": true + } +} diff --git a/node_modules/path-key/readme.md b/node_modules/path-key/readme.md new file mode 100644 index 00000000..cb5710aa --- /dev/null +++ b/node_modules/path-key/readme.md @@ -0,0 +1,51 @@ +# path-key [![Build Status](https://travis-ci.org/sindresorhus/path-key.svg?branch=master)](https://travis-ci.org/sindresorhus/path-key) + +> Get the [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) environment variable key cross-platform + +It's usually `PATH`, but on Windows it can be any casing like `Path`... + + +## Install + +``` +$ npm install --save path-key +``` + + +## Usage + +```js +const pathKey = require('path-key'); + +const key = pathKey(); +//=> 'PATH' + +const PATH = process.env[key]; +//=> '/usr/local/bin:/usr/bin:/bin' +``` + + +## API + +### pathKey([options]) + +#### options + +##### env + +Type: `Object`
+Default: [`process.env`](https://nodejs.org/api/process.html#process_process_env) + +Use a custom environment variables object. + +#### platform + +Type: `string`
+Default: [`process.platform`](https://nodejs.org/api/process.html#process_process_platform) + +Get the PATH key for a specific platform. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/path-to-regexp/History.md b/node_modules/path-to-regexp/History.md new file mode 100644 index 00000000..7f658784 --- /dev/null +++ b/node_modules/path-to-regexp/History.md @@ -0,0 +1,36 @@ +0.1.7 / 2015-07-28 +================== + + * Fixed regression with escaped round brackets and matching groups. + +0.1.6 / 2015-06-19 +================== + + * Replace `index` feature by outputting all parameters, unnamed and named. + +0.1.5 / 2015-05-08 +================== + + * Add an index property for position in match result. + +0.1.4 / 2015-03-05 +================== + + * Add license information + +0.1.3 / 2014-07-06 +================== + + * Better array support + * Improved support for trailing slash in non-ending mode + +0.1.0 / 2014-03-06 +================== + + * add options.end + +0.0.2 / 2013-02-10 +================== + + * Update to match current express + * add .license property to component.json diff --git a/node_modules/path-to-regexp/LICENSE b/node_modules/path-to-regexp/LICENSE new file mode 100644 index 00000000..983fbe8a --- /dev/null +++ b/node_modules/path-to-regexp/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/path-to-regexp/Readme.md b/node_modules/path-to-regexp/Readme.md new file mode 100644 index 00000000..95452a6e --- /dev/null +++ b/node_modules/path-to-regexp/Readme.md @@ -0,0 +1,35 @@ +# Path-to-RegExp + +Turn an Express-style path string such as `/user/:name` into a regular expression. + +**Note:** This is a legacy branch. You should upgrade to `1.x`. + +## Usage + +```javascript +var pathToRegexp = require('path-to-regexp'); +``` + +### pathToRegexp(path, keys, options) + + - **path** A string in the express format, an array of such strings, or a regular expression + - **keys** An array to be populated with the keys present in the url. Once the function completes, this will be an array of strings. + - **options** + - **options.sensitive** Defaults to false, set this to true to make routes case sensitive + - **options.strict** Defaults to false, set this to true to make the trailing slash matter. + - **options.end** Defaults to true, set this to false to only match the prefix of the URL. 
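As a hedged illustration of the `end` option on this legacy 0.1.x API (this block is not part of the vendored readme; the module's own basic example follows below):

```js
var pathToRegexp = require('path-to-regexp');

var keys = [];
// With end set to false, only the prefix of the URL has to match.
var exp = pathToRegexp('/foo/:bar', keys, { end: false });

exp.test('/foo/123');          //=> true
exp.test('/foo/123/extra');    //=> true
exp.exec('/foo/123/extra')[1]; //=> '123'
```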
+ +```javascript +var keys = []; +var exp = pathToRegexp('/foo/:bar', keys); +//keys = ['bar'] +//exp = /^\/foo\/(?:([^\/]+?))\/?$/i +``` + +## Live Demo + +You can see a live demo of this library in use at [express-route-tester](http://forbeslindesay.github.com/express-route-tester/). + +## License + + MIT diff --git a/node_modules/path-to-regexp/index.js b/node_modules/path-to-regexp/index.js new file mode 100644 index 00000000..500d1dad --- /dev/null +++ b/node_modules/path-to-regexp/index.js @@ -0,0 +1,129 @@ +/** + * Expose `pathtoRegexp`. + */ + +module.exports = pathtoRegexp; + +/** + * Match matching groups in a regular expression. + */ +var MATCHING_GROUP_REGEXP = /\((?!\?)/g; + +/** + * Normalize the given path string, + * returning a regular expression. + * + * An empty array should be passed, + * which will contain the placeholder + * key names. For example "/user/:id" will + * then contain ["id"]. + * + * @param {String|RegExp|Array} path + * @param {Array} keys + * @param {Object} options + * @return {RegExp} + * @api private + */ + +function pathtoRegexp(path, keys, options) { + options = options || {}; + keys = keys || []; + var strict = options.strict; + var end = options.end !== false; + var flags = options.sensitive ? '' : 'i'; + var extraOffset = 0; + var keysOffset = keys.length; + var i = 0; + var name = 0; + var m; + + if (path instanceof RegExp) { + while (m = MATCHING_GROUP_REGEXP.exec(path.source)) { + keys.push({ + name: name++, + optional: false, + offset: m.index + }); + } + + return path; + } + + if (Array.isArray(path)) { + // Map array parts into regexps and return their source. We also pass + // the same keys and options instance into every generation to get + // consistent matching groups before we join the sources together. + path = path.map(function (value) { + return pathtoRegexp(value, keys, options).source; + }); + + return new RegExp('(?:' + path.join('|') + ')', flags); + } + + path = ('^' + path + (strict ? '' : path[path.length - 1] === '/' ? '?' : '/?')) + .replace(/\/\(/g, '/(?:') + .replace(/([\/\.])/g, '\\$1') + .replace(/(\\\/)?(\\\.)?:(\w+)(\(.*?\))?(\*)?(\?)?/g, function (match, slash, format, key, capture, star, optional, offset) { + slash = slash || ''; + format = format || ''; + capture = capture || '([^\\/' + format + ']+?)'; + optional = optional || ''; + + keys.push({ + name: key, + optional: !!optional, + offset: offset + extraOffset + }); + + var result = '' + + (optional ? '' : slash) + + '(?:' + + format + (optional ? slash : '') + capture + + (star ? '((?:[\\/' + format + '].+?)?)' : '') + + ')' + + optional; + + extraOffset += result.length - match.length; + + return result; + }) + .replace(/\*/g, function (star, index) { + var len = keys.length + + while (len-- > keysOffset && keys[len].offset > index) { + keys[len].offset += 3; // Replacement length minus asterisk length. + } + + return '(.*)'; + }); + + // This is a workaround for handling unnamed matching groups. + while (m = MATCHING_GROUP_REGEXP.exec(path)) { + var escapeCount = 0; + var index = m.index; + + while (path.charAt(--index) === '\\') { + escapeCount++; + } + + // It's possible to escape the bracket. + if (escapeCount % 2 === 1) { + continue; + } + + if (keysOffset + i === keys.length || keys[keysOffset + i].offset > m.index) { + keys.splice(keysOffset + i, 0, { + name: name++, // Unnamed matching groups must be consistently linear. + optional: false, + offset: m.index + }); + } + + i++; + } + + // If the path is non-ending, match until the end or a slash. 
+ path += (end ? '$' : (path[path.length - 1] === '/' ? '' : '(?=\\/|$)')); + + return new RegExp(path, flags); +}; diff --git a/node_modules/path-to-regexp/package.json b/node_modules/path-to-regexp/package.json new file mode 100644 index 00000000..d4e51b57 --- /dev/null +++ b/node_modules/path-to-regexp/package.json @@ -0,0 +1,30 @@ +{ + "name": "path-to-regexp", + "description": "Express style path to RegExp utility", + "version": "0.1.7", + "files": [ + "index.js", + "LICENSE" + ], + "scripts": { + "test": "istanbul cover _mocha -- -R spec" + }, + "keywords": [ + "express", + "regexp" + ], + "component": { + "scripts": { + "path-to-regexp": "index.js" + } + }, + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/component/path-to-regexp.git" + }, + "devDependencies": { + "mocha": "^1.17.1", + "istanbul": "^0.2.6" + } +} diff --git a/node_modules/pify/index.js b/node_modules/pify/index.js new file mode 100644 index 00000000..1dee43ad --- /dev/null +++ b/node_modules/pify/index.js @@ -0,0 +1,84 @@ +'use strict'; + +const processFn = (fn, opts) => function () { + const P = opts.promiseModule; + const args = new Array(arguments.length); + + for (let i = 0; i < arguments.length; i++) { + args[i] = arguments[i]; + } + + return new P((resolve, reject) => { + if (opts.errorFirst) { + args.push(function (err, result) { + if (opts.multiArgs) { + const results = new Array(arguments.length - 1); + + for (let i = 1; i < arguments.length; i++) { + results[i - 1] = arguments[i]; + } + + if (err) { + results.unshift(err); + reject(results); + } else { + resolve(results); + } + } else if (err) { + reject(err); + } else { + resolve(result); + } + }); + } else { + args.push(function (result) { + if (opts.multiArgs) { + const results = new Array(arguments.length - 1); + + for (let i = 0; i < arguments.length; i++) { + results[i] = arguments[i]; + } + + resolve(results); + } else { + resolve(result); + } + }); + } + + fn.apply(this, args); + }); +}; + +module.exports = (obj, opts) => { + opts = Object.assign({ + exclude: [/.+(Sync|Stream)$/], + errorFirst: true, + promiseModule: Promise + }, opts); + + const filter = key => { + const match = pattern => typeof pattern === 'string' ? key === pattern : pattern.test(key); + return opts.include ? opts.include.some(match) : !opts.exclude.some(match); + }; + + let ret; + if (typeof obj === 'function') { + ret = function () { + if (opts.excludeMain) { + return obj.apply(this, arguments); + } + + return processFn(obj, opts).apply(this, arguments); + }; + } else { + ret = Object.create(Object.getPrototypeOf(obj)); + } + + for (const key in obj) { // eslint-disable-line guard-for-in + const x = obj[key]; + ret[key] = typeof x === 'function' && filter(key) ? 
processFn(x, opts) : x; + } + + return ret; +}; diff --git a/node_modules/pify/license b/node_modules/pify/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/pify/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/pify/package.json b/node_modules/pify/package.json new file mode 100644 index 00000000..468d8576 --- /dev/null +++ b/node_modules/pify/package.json @@ -0,0 +1,51 @@ +{ + "name": "pify", + "version": "3.0.0", + "description": "Promisify a callback-style function", + "license": "MIT", + "repository": "sindresorhus/pify", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava && npm run optimization-test", + "optimization-test": "node --allow-natives-syntax optimization-test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "promise", + "promises", + "promisify", + "all", + "denodify", + "denodeify", + "callback", + "cb", + "node", + "then", + "thenify", + "convert", + "transform", + "wrap", + "wrapper", + "bind", + "to", + "async", + "await", + "es2015", + "bluebird" + ], + "devDependencies": { + "ava": "*", + "pinkie-promise": "^2.0.0", + "v8-natives": "^1.0.0", + "xo": "*" + } +} diff --git a/node_modules/pify/readme.md b/node_modules/pify/readme.md new file mode 100644 index 00000000..376ca4e5 --- /dev/null +++ b/node_modules/pify/readme.md @@ -0,0 +1,131 @@ +# pify [![Build Status](https://travis-ci.org/sindresorhus/pify.svg?branch=master)](https://travis-ci.org/sindresorhus/pify) + +> Promisify a callback-style function + + +## Install + +``` +$ npm install --save pify +``` + + +## Usage + +```js +const fs = require('fs'); +const pify = require('pify'); + +// Promisify a single function +pify(fs.readFile)('package.json', 'utf8').then(data => { + console.log(JSON.parse(data).name); + //=> 'pify' +}); + +// Promisify all methods in a module +pify(fs).readFile('package.json', 'utf8').then(data => { + console.log(JSON.parse(data).name); + //=> 'pify' +}); +``` + + +## API + +### pify(input, [options]) + +Returns a `Promise` wrapped version of the supplied function or module. + +#### input + +Type: `Function` `Object` + +Callback-style function or module whose methods you want to promisify. + +#### options + +##### multiArgs + +Type: `boolean`
+Default: `false` + +By default, the promisified function will only return the second argument from the callback, which works fine for most APIs. This option can be useful for modules like `request` that return multiple arguments. Turning this on will make it return an array of all arguments from the callback, excluding the error argument, instead of just the second argument. This also applies to rejections, where it returns an array of all the callback arguments, including the error. + +```js +const request = require('request'); +const pify = require('pify'); + +pify(request, {multiArgs: true})('https://sindresorhus.com').then(result => { + const [httpResponse, body] = result; +}); +``` + +##### include + +Type: `string[]` `RegExp[]` + +Methods in a module to promisify. Remaining methods will be left untouched. + +##### exclude + +Type: `string[]` `RegExp[]`
+Default: `[/.+(Sync|Stream)$/]` + +Methods in a module **not** to promisify. Methods with names ending with `'Sync'` are excluded by default. + +##### excludeMain + +Type: `boolean`
+Default: `false` + +If given module is a function itself, it will be promisified. Turn this option on if you want to promisify only methods of the module. + +```js +const pify = require('pify'); + +function fn() { + return true; +} + +fn.method = (data, callback) => { + setImmediate(() => { + callback(null, data); + }); +}; + +// Promisify methods but not `fn()` +const promiseFn = pify(fn, {excludeMain: true}); + +if (promiseFn()) { + promiseFn.method('hi').then(data => { + console.log(data); + }); +} +``` + +##### errorFirst + +Type: `boolean`
+Default: `true` + +Whether the callback has an error as the first argument. You'll want to set this to `false` if you're dealing with an API that doesn't have an error as the first argument, like `fs.exists()`, some browser APIs, Chrome Extension APIs, etc. + +##### promiseModule + +Type: `Function` + +Custom promise module to use instead of the native one. + +Check out [`pinkie-promise`](https://github.com/floatdrop/pinkie-promise) if you need a tiny promise polyfill. + + +## Related + +- [p-event](https://github.com/sindresorhus/p-event) - Promisify an event by waiting for it to be emitted +- [p-map](https://github.com/sindresorhus/p-map) - Map over promises concurrently +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/pirates/LICENSE b/node_modules/pirates/LICENSE new file mode 100644 index 00000000..acc7a0e0 --- /dev/null +++ b/node_modules/pirates/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016-2018 Ari Porad + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
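The pify options documented above can be combined; `promiseModule` and `errorFirst` have no example of their own in the vendored readme, so here is a minimal sketch (illustrative only; `fs.exists` is the callback-without-error case the readme mentions, and `pinkie-promise` stands in for a custom promise implementation):

```js
const fs = require('fs');
const pify = require('pify');
const pinkie = require('pinkie-promise');

// fs.exists calls back with (exists) and no error argument, so errorFirst is disabled.
pify(fs.exists, {errorFirst: false, promiseModule: pinkie})('package.json')
  .then(exists => {
    console.log(exists); //=> true when run next to a package.json
  });
```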
diff --git a/node_modules/pirates/README.md b/node_modules/pirates/README.md new file mode 100644 index 00000000..68e25812 --- /dev/null +++ b/node_modules/pirates/README.md @@ -0,0 +1,89 @@ +# Pirates [![Version][version-badge]][npm-link] [![Build Status][build-badge]][build-link] [![Coverage][codecov-badge]][codecov-link] [![Commitizen friendly][cz-badge]][cz-link] [![semantic-release][sr-badge]][sr-link] [![MIT License][license-badge]][license-link] + +### Properly hijack require + +[version-badge]: https://img.shields.io/npm/v/pirates.svg "npm version" +[downloads-badge]: https://img.shields.io/npm/dm/pirates.svg "npm downloads" +[npm-link]: http://npm.im/pirates "npm" + +[codecov-badge]: https://img.shields.io/codecov/c/github/ariporad/pirates/master.svg?style=flat "codecov" +[codecov-link]: https://codecov.io/gh/ariporad/pirates "codecov" + +[license-badge]: https://img.shields.io/npm/l/express.svg "MIT License" +[license-link]: http://ariporad.mit-license.org "MIT License" + +[build-badge]: https://travis-ci.org/ariporad/pirates.svg "Travis CI Build Status" +[build-link]: https://travis-ci.org/ariporad/pirates "Travis CI Build Status" + +[cz-badge]: https://img.shields.io/badge/commitizen-friendly-brightgreen.svg "Commitizen friendly" +[cz-link]: http://commitizen.github.io/cz-cli/ "Commitizen friendly" + +[sr-badge]: https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg +[sr-link]: https://github.com/semantic-release/semantic-release + +## Why? + +Two reasons: +1. Babel and istanbul were breaking each other. +2. Everyone seemed to re-invent the wheel on this, and everyone wanted a solution that was DRY, simple, easy to use, +and made everything Just Work™, while allowing multiple require hooks, in a fashion similar to calling `super`. + +For some context, see [the Babel issue thread][] which started this all, then [the nyc issue thread][], where +discussion was moved (as we began to discuss just using the code nyc had developed), and finally to [#1][issue-1] +where discussion was finally moved. + +[the Babel issue thread]: https://github.com/babel/babel/pull/3062 "Babel Issue Thread" +[the nyc issue thread]: https://github.com/bcoe/nyc/issues/70 "NYC Issue Thread" +[issue-1]: https://github.com/ariporad/pirates/issues/1 "Issue #1" + +## Installation + + npm install --save pirates + +## Usage + +Using pirates is really easy: +```javascript +// my-module/register.js +const addHook = require('pirates').addHook; +// Or if you use ES modules +// import { addHook } from 'pirates'; + +function matcher(filename) { + // Here, you can inspect the filename to determine if it should be hooked or + // not. Just return a truthy/falsey. Files in node_modules are automatically ignored, + // unless otherwise specified in options (see below). + + // TODO: Implement your logic here + return true; +} + +const revert = addHook( + (code, filename) => code.replace('@@foo', 'console.log(\'foo\');'), + { exts: ['.js'], matcher } +); + +// And later, if you want to un-hook require, you can just do: +revert(); +``` + +## API + +### pirates.addHook(hook, [opts={ [matcher: true], [exts: ['.js']], [ignoreNodeModules: true] }]); +Add a require hook. `hook` must be a function that takes `(code, filename)`, and returns the modified code. `opts` is +an optional options object. 
Available options are: `matcher`, which is a function that accepts a filename, and +returns a truthy value if the file should be hooked (defaults to a function that always returns true), falsy +otherwise; `exts`, which is an array of extensions to hook; they should begin with `.` (defaults to `['.js']`); +`ignoreNodeModules`, if true, any file in a `node_modules` folder won't be hooked (the matcher also won't be called); +if false, the matcher will be called for any files in `node_modules` (defaults to true). + + +## Projects that use Pirates + +See the [wiki page](https://github.com/ariporad/pirates/wiki/Projects-using-Pirates). If you add Pirates to your project +(and you should! It works best if everyone uses it, and then we can have a happy world full of happy require hooks!), please +add yourself to the wiki. + +## License + +[MIT](http://ariporad.mit-license.org) diff --git a/node_modules/pirates/index.d.ts b/node_modules/pirates/index.d.ts new file mode 100644 index 00000000..63d0b740 --- /dev/null +++ b/node_modules/pirates/index.d.ts @@ -0,0 +1,30 @@ +/* (c) 2015 Ari Porad (@ariporad) . License: ariporad.mit-license.org */ + +declare type Hook = (code: string, filename: string) => string; +declare type Matcher = (code: string) => boolean; +declare type RevertFunction = () => void; +interface Options { + /** A matcher function, will be called with path to a file. Should return truthy if the file should be hooked, falsy otherwise. */ + matcher?: Matcher; + /** + * The extensions to hook. Should start with '.' (ex. ['.js']). + * + * @default ['.js'] + */ + exts?: Array<string>; + /** + * Auto-ignore node_modules. Independent of any matcher. + * + * @default true + */ + ignoreNodeModules?: boolean; +} +/** + * Add a require hook. + * + * @param {Hook} hook - The hook. Accepts the code of the module and the filename. Required. + * @param {Options} [opts] - Options + * @returns {RevertFunction} revert - Reverts the hooks. + */ +export declare function addHook(hook: Hook, opts?: Options): RevertFunction; +export {}; diff --git a/node_modules/pirates/lib/index.js b/node_modules/pirates/lib/index.js new file mode 100644 index 00000000..bfe26ae4 --- /dev/null +++ b/node_modules/pirates/lib/index.js @@ -0,0 +1,118 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.addHook = addHook; + +var _module = _interopRequireDefault(require("module")); + +var _path = _interopRequireDefault(require("path")); + +var _nodeModulesRegexp = _interopRequireDefault(require("node-modules-regexp")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/* (c) 2015 Ari Porad (@ariporad) . License: ariporad.mit-license.org */ +// Guard against poorly mocked module constructors. +const Module = module.constructor.length > 1 ? module.constructor : _module.default; +const HOOK_RETURNED_NOTHING_ERROR_MESSAGE = '[Pirates] A hook returned a non-string, or nothing at all! This is a' + ' violation of intergalactic law!\n' + '--------------------\n' + 'If you have no idea what this means or what Pirates is, let me explain: ' + 'Pirates is a module that makes it easy to implement require hooks. One of' + " the require hooks you're using uses it. One of these require hooks" + " didn't return anything from its handler, so we don't know what to" + ' do.
You might want to debug this.'; + +function shouldCompile(filename, exts, matcher, ignoreNodeModules) { + if (typeof filename !== 'string') { + return false; + } + + if (exts.indexOf(_path.default.extname(filename)) === -1) { + return false; + } + + const resolvedFilename = _path.default.resolve(filename); + + if (ignoreNodeModules && _nodeModulesRegexp.default.test(resolvedFilename)) { + return false; + } + + if (matcher && typeof matcher === 'function') { + return !!matcher(resolvedFilename); + } + + return true; +} +/** + * Add a require hook. + * + * @param {Function} hook - The hook. Accepts the code of the module and the filename. Required. + * @param {Object} [opts] - Options + * @param {String[]} [opts.exts=['.js']] - The extensions to hook. Should start with '.' (ex. ['.js']). + * @param {Function(path)} [opts.matcher] - A matcher function, will be called with path to a file. Should return truthy if the file should be hooked, falsy otherwise. + * @param {Boolean} [opts.ignoreNodeModules=true] - Auto-ignore node_modules. Independent of any matcher. + * @returns {Function} revert - Reverts the hooks. + */ + + +function addHook(hook, opts = {}) { + let reverted = false; + const loaders = []; + const oldLoaders = []; + let exts; // We need to do this to fix #15. Basically, if you use a non-standard extension (ie. .jsx), then + // We modify the .js loader, then use the modified .js loader for as the base for .jsx. + // This prevents that. + + const originalJSLoader = Module._extensions['.js']; + const matcher = opts.matcher || null; + const ignoreNodeModules = opts.ignoreNodeModules !== false; + exts = opts.extensions || opts.exts || opts.extension || opts.ext || ['.js']; + + if (!Array.isArray(exts)) { + exts = [exts]; + } + + exts.forEach(ext => { + if (typeof ext !== 'string') { + throw new TypeError(`Invalid Extension: ${ext}`); + } + + const oldLoader = Module._extensions[ext] || originalJSLoader; + oldLoaders[ext] = oldLoader; + + loaders[ext] = Module._extensions[ext] = function newLoader(mod, filename) { + let compile; + + if (!reverted) { + if (shouldCompile(filename, exts, matcher, ignoreNodeModules)) { + compile = mod._compile; + + mod._compile = function _compile(code) { + // reset the compile immediately as otherwise we end up having the + // compile function being changed even though this loader might be reverted + // Not reverting it here leads to long useless compile chains when doing + // addHook -> revert -> addHook -> revert -> ... + // The compile function is also anyway created new when the loader is called a second time. 
+ mod._compile = compile; + const newCode = hook(code, filename); + + if (typeof newCode !== 'string') { + throw new Error(HOOK_RETURNED_NOTHING_ERROR_MESSAGE); + } + + return mod._compile(newCode, filename); + }; + } + } + + oldLoader(mod, filename); + }; + }); + return function revert() { + if (reverted) return; + reverted = true; + exts.forEach(ext => { + // if the current loader for the extension is our loader then unregister it and set the oldLoader again + // if not we can not do anything as we cannot remove a loader from within the loader-chain + if (Module._extensions[ext] === loaders[ext]) { + Module._extensions[ext] = oldLoaders[ext]; + } + }); + }; +} \ No newline at end of file diff --git a/node_modules/pirates/package.json b/node_modules/pirates/package.json new file mode 100644 index 00000000..4115f1ed --- /dev/null +++ b/node_modules/pirates/package.json @@ -0,0 +1,84 @@ +{ + "name": "pirates", + "description": "Properly hijack require", + "main": "lib/index.js", + "types": "index.d.ts", + "scripts": { + "clean": "rimraf lib", + "build": "babel src -d lib", + "test": "yarn run lint && cross-env BABEL_ENV=test yarn run build && nyc ava", + "lint": "eslint --report-unused-disable-directives .", + "prepublish": "yarn run clean && yarn run build" + }, + "files": [ + "lib", + "index.d.ts" + ], + "repository": { + "type": "git", + "url": "https://github.com/ariporad/pirates.git" + }, + "engines": { + "node": ">= 6" + }, + "author": { + "name": "Ari Porad", + "email": "ari@ariporad.com", + "url": "http://ariporad.com" + }, + "dependencies": { + "node-modules-regexp": "^1.0.0" + }, + "devDependencies": { + "@babel/cli": "^7.0.0", + "@babel/core": "^7.0.0", + "@babel/preset-env": "^7.0.0", + "ava": "^1.2.1", + "babel-core": "^7.0.0-0", + "babel-eslint": "^10.0.1", + "babel-plugin-istanbul": "^5.1.0", + "cross-env": "^5.0.5", + "cz-conventional-changelog": "^2.0.0", + "decache": "^4.1.0", + "eslint": "^5.1.0", + "eslint-config-prettier": "^4.0.0", + "eslint-plugin-import": "^2.2.0", + "eslint-plugin-prettier": "^3.0.1", + "mock-require": "^3.0.2", + "nyc": "^13.2.0", + "prettier": "^1.16.4", + "rewire": "^4.0.1", + "rimraf": "^2.6.1", + "semantic-release": "^15.7.0" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/ariporad/pirates/issues" + }, + "homepage": "https://github.com/ariporad/pirates#readme", + "config": { + "commitizen": { + "path": "cz-conventional-changelog" + } + }, + "ava": { + "files": [ + "test/*.js" + ], + "sources": [ + "lib/**/*.js" + ] + }, + "nyc": { + "include": [ + "src/*.js" + ], + "reporter": [ + "json", + "text" + ], + "sourceMap": false, + "instrument": false + }, + "version": "4.0.1" +} diff --git a/node_modules/posix-character-classes/LICENSE b/node_modules/posix-character-classes/LICENSE new file mode 100644 index 00000000..b11cb796 --- /dev/null +++ b/node_modules/posix-character-classes/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016-2017, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the 
Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/posix-character-classes/README.md b/node_modules/posix-character-classes/README.md new file mode 100644 index 00000000..894ae237 --- /dev/null +++ b/node_modules/posix-character-classes/README.md @@ -0,0 +1,103 @@ +# posix-character-classes [![NPM version](https://img.shields.io/npm/v/posix-character-classes.svg?style=flat)](https://www.npmjs.com/package/posix-character-classes) [![NPM monthly downloads](https://img.shields.io/npm/dm/posix-character-classes.svg?style=flat)](https://npmjs.org/package/posix-character-classes) [![NPM total downloads](https://img.shields.io/npm/dt/posix-character-classes.svg?style=flat)](https://npmjs.org/package/posix-character-classes) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/posix-character-classes.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/posix-character-classes) + +> POSIX character classes for creating regular expressions. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save posix-character-classes +``` + +Install with [yarn](https://yarnpkg.com): + +```sh +$ yarn add posix-character-classes +``` + +## Usage + +```js +var posix = require('posix-character-classes'); +console.log(posix.alpha); +//=> 'A-Za-z' +``` + +## POSIX Character classes + +The POSIX standard supports the following classes or categories of characters (note that classes must be defined within brackets)[1]: + +| **POSIX class** | **Equivalent to** | **Matches** | +| --- | --- | --- | +| `[:alnum:]` | `[A-Za-z0-9]` | digits, uppercase and lowercase letters | +| `[:alpha:]` | `[A-Za-z]` | upper- and lowercase letters | +| `[:ascii:]` | `[\x00-\x7F]` | ASCII characters | +| `[:blank:]` | `[ \t]` | space and TAB characters only | +| `[:cntrl:]` | `[\x00-\x1F\x7F]` | Control characters | +| `[:digit:]` | `[0-9]` | digits | +| `[:graph:]` | `[^[:cntrl:]]` | graphic characters (all characters which have graphic representation) | +| `[:lower:]` | `[a-z]` | lowercase letters | +| `[:print:]` | `[[:graph] ]` | graphic characters and space | +| `[:punct:]` | ``[-!"#$%&'()*+,./:;<=>?@[]^_`{\|}~]`` | all punctuation characters (all graphic characters except letters and digits) | +| `[:space:]` | `[ \t\n\r\f\v]` | all blank (whitespace) characters, including spaces, tabs, new lines, carriage returns, form feeds, and vertical tabs | +| `[:upper:]` | `[A-Z]` | uppercase letters | +| `[:word:]` | `[A-Za-z0-9_]` | word characters | +| `[:xdigit:]` | `[0-9A-Fa-f]` | hexadecimal digits | + +## Examples + +* `a[[:digit:]]b` matches `a0b`, `a1b`, ..., `a9b`. +* `a[:digit:]b` is invalid; character classes must be enclosed in brackets. +* `[[:digit:]abc]` matches any digit, as well as `a`, `b`, and `c`. +* `[abc[:digit:]]` is the same as the previous, matching any digit, as well as `a`, `b`, and `c`. +* `[^ABZ[:lower:]]` matches any character except lowercase letters, `A`, `B`, and `Z`. + +## About + +### Contributing + +Pull requests and stars are always welcome.
For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.5.0, on April 20, 2017._ + +
+
+
    +
  1. table and examples are based on the WikiBooks page for [Regular Expressions/POSIX Basic Regular Expressions](https://en.wikibooks.org/wiki/Regular_Expressions/POSIX_Basic_Regular_Expressions), which is available under the [Creative Commons Attribution-ShareAlike License](https://creativecommons.org/licenses/by-sa/3.0/). + +
  2. +
+
\ No newline at end of file diff --git a/node_modules/posix-character-classes/index.js b/node_modules/posix-character-classes/index.js new file mode 100644 index 00000000..19f1c986 --- /dev/null +++ b/node_modules/posix-character-classes/index.js @@ -0,0 +1,22 @@ +'use strict'; + +/** + * POSIX character classes + */ + +module.exports = { + alnum: 'a-zA-Z0-9', + alpha: 'a-zA-Z', + ascii: '\\x00-\\x7F', + blank: ' \\t', + cntrl: '\\x00-\\x1F\\x7F', + digit: '0-9', + graph: '\\x21-\\x7E', + lower: 'a-z', + print: '\\x20-\\x7E ', + punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', + space: ' \\t\\r\\n\\v\\f', + upper: 'A-Z', + word: 'A-Za-z0-9_', + xdigit: 'A-Fa-f0-9' +}; diff --git a/node_modules/posix-character-classes/package.json b/node_modules/posix-character-classes/package.json new file mode 100644 index 00000000..acce4ba7 --- /dev/null +++ b/node_modules/posix-character-classes/package.json @@ -0,0 +1,54 @@ +{ + "name": "posix-character-classes", + "description": "POSIX character classes for creating regular expressions.", + "version": "0.1.1", + "homepage": "https://github.com/jonschlinkert/posix-character-classes", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/posix-character-classes", + "bugs": { + "url": "https://github.com/jonschlinkert/posix-character-classes/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^0.1.12", + "mocha": "^3.2.0" + }, + "keywords": [ + "character", + "classes", + "posix" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "reflinks": [ + "verb", + "verb-generate-readme" + ], + "lint": { + "reflinks": true + }, + "related-list": [ + "micromatch", + "nanomatch", + "extglob", + "expand-brackets" + ] + } +} diff --git a/node_modules/prepend-http/index.js b/node_modules/prepend-http/index.js new file mode 100644 index 00000000..60f532ac --- /dev/null +++ b/node_modules/prepend-http/index.js @@ -0,0 +1,14 @@ +'use strict'; +module.exports = function (url) { + if (typeof url !== 'string') { + throw new TypeError('Expected a string, got ' + typeof url); + } + + url = url.trim(); + + if (/^\.*\/|^(?!localhost)\w+:/.test(url)) { + return url; + } + + return url.replace(/^(?!(?:\w+:)?\/\/)/, 'http://'); +}; diff --git a/node_modules/prepend-http/license b/node_modules/prepend-http/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/prepend-http/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/prepend-http/package.json b/node_modules/prepend-http/package.json new file mode 100644 index 00000000..75954f70 --- /dev/null +++ b/node_modules/prepend-http/package.json @@ -0,0 +1,35 @@ +{ + "name": "prepend-http", + "version": "1.0.4", + "description": "Prepend `http://` to humanized URLs like todomvc.com and localhost", + "license": "MIT", + "repository": "sindresorhus/prepend-http", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "prepend", + "protocol", + "scheme", + "url", + "uri", + "http", + "https", + "humanized" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/prepend-http/readme.md b/node_modules/prepend-http/readme.md new file mode 100644 index 00000000..df7557e3 --- /dev/null +++ b/node_modules/prepend-http/readme.md @@ -0,0 +1,31 @@ +# prepend-http [![Build Status](https://travis-ci.org/sindresorhus/prepend-http.svg?branch=master)](https://travis-ci.org/sindresorhus/prepend-http) + +> Prepend `http://` to humanized URLs like `todomvc.com` and `localhost` + + +## Install + +``` +$ npm install --save prepend-http +``` + + +## Usage + +```js +const prependHttp = require('prepend-http'); + +prependHttp('todomvc.com'); +//=> 'http://todomvc.com' + +prependHttp('localhost'); +//=> 'http://localhost' + +prependHttp('http://todomvc.com'); +//=> 'http://todomvc.com' +``` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/process-nextick-args/index.js b/node_modules/process-nextick-args/index.js new file mode 100644 index 00000000..3eecf114 --- /dev/null +++ b/node_modules/process-nextick-args/index.js @@ -0,0 +1,45 @@ +'use strict'; + +if (typeof process === 'undefined' || + !process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} + diff --git a/node_modules/process-nextick-args/license.md b/node_modules/process-nextick-args/license.md new file mode 100644 index 00000000..c67e3532 --- /dev/null +++ b/node_modules/process-nextick-args/license.md @@ -0,0 +1,19 @@ +# Copyright (c) 2015 Calvin Metcalf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation 
files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.** diff --git a/node_modules/process-nextick-args/package.json b/node_modules/process-nextick-args/package.json new file mode 100644 index 00000000..6070b723 --- /dev/null +++ b/node_modules/process-nextick-args/package.json @@ -0,0 +1,25 @@ +{ + "name": "process-nextick-args", + "version": "2.0.1", + "description": "process.nextTick but always with args", + "main": "index.js", + "files": [ + "index.js" + ], + "scripts": { + "test": "node test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/calvinmetcalf/process-nextick-args.git" + }, + "author": "", + "license": "MIT", + "bugs": { + "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" + }, + "homepage": "https://github.com/calvinmetcalf/process-nextick-args", + "devDependencies": { + "tap": "~0.2.6" + } +} diff --git a/node_modules/process-nextick-args/readme.md b/node_modules/process-nextick-args/readme.md new file mode 100644 index 00000000..ecb432c9 --- /dev/null +++ b/node_modules/process-nextick-args/readme.md @@ -0,0 +1,18 @@ +process-nextick-args +===== + +[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) + +```bash +npm install --save process-nextick-args +``` + +Always be able to pass arguments to process.nextTick, no matter the platform + +```js +var pna = require('process-nextick-args'); + +pna.nextTick(function (a, b, c) { + console.log(a, b, c); +}, 'step', 3, 'profit'); +``` diff --git a/node_modules/proxy-addr/HISTORY.md b/node_modules/proxy-addr/HISTORY.md new file mode 100644 index 00000000..9765f089 --- /dev/null +++ b/node_modules/proxy-addr/HISTORY.md @@ -0,0 +1,150 @@ +2.0.5 / 2019-04-16 +================== + + * deps: ipaddr.js@1.9.0 + +2.0.4 / 2018-07-26 +================== + + * deps: ipaddr.js@1.8.0 + +2.0.3 / 2018-02-19 +================== + + * deps: ipaddr.js@1.6.0 + +2.0.2 / 2017-09-24 +================== + + * deps: forwarded@~0.1.2 + - perf: improve header parsing + - perf: reduce overhead when no `X-Forwarded-For` header + +2.0.1 / 2017-09-10 +================== + + * deps: forwarded@~0.1.1 + - Fix trimming leading / trailing OWS + - perf: hoist regular expression + * deps: ipaddr.js@1.5.2 + +2.0.0 / 2017-08-08 +================== + + * Drop support for Node.js below 0.10 + +1.1.5 / 2017-07-25 +================== + + * Fix array argument being altered + * deps: ipaddr.js@1.4.0 + +1.1.4 / 2017-03-24 +================== + + * deps: ipaddr.js@1.3.0 + +1.1.3 / 2017-01-14 +================== + + * deps: ipaddr.js@1.2.0 + +1.1.2 / 2016-05-29 +================== + + * deps: 
ipaddr.js@1.1.1 + - Fix IPv6-mapped IPv4 validation edge cases + +1.1.1 / 2016-05-03 +================== + + * Fix regression matching mixed versions against multiple subnets + +1.1.0 / 2016-05-01 +================== + + * Fix accepting various invalid netmasks + - IPv4 netmasks must be contingous + - IPv6 addresses cannot be used as a netmask + * deps: ipaddr.js@1.1.0 + +1.0.10 / 2015-12-09 +=================== + + * deps: ipaddr.js@1.0.5 + - Fix regression in `isValid` with non-string arguments + +1.0.9 / 2015-12-01 +================== + + * deps: ipaddr.js@1.0.4 + - Fix accepting some invalid IPv6 addresses + - Reject CIDRs with negative or overlong masks + * perf: enable strict mode + +1.0.8 / 2015-05-10 +================== + + * deps: ipaddr.js@1.0.1 + +1.0.7 / 2015-03-16 +================== + + * deps: ipaddr.js@0.1.9 + - Fix OOM on certain inputs to `isValid` + +1.0.6 / 2015-02-01 +================== + + * deps: ipaddr.js@0.1.8 + +1.0.5 / 2015-01-08 +================== + + * deps: ipaddr.js@0.1.6 + +1.0.4 / 2014-11-23 +================== + + * deps: ipaddr.js@0.1.5 + - Fix edge cases with `isValid` + +1.0.3 / 2014-09-21 +================== + + * Use `forwarded` npm module + +1.0.2 / 2014-09-18 +================== + + * Fix a global leak when multiple subnets are trusted + * Support Node.js 0.6 + * deps: ipaddr.js@0.1.3 + +1.0.1 / 2014-06-03 +================== + + * Fix links in npm package + +1.0.0 / 2014-05-08 +================== + + * Add `trust` argument to determine proxy trust on + * Accepts custom function + * Accepts IPv4/IPv6 address(es) + * Accepts subnets + * Accepts pre-defined names + * Add optional `trust` argument to `proxyaddr.all` to + stop at first untrusted + * Add `proxyaddr.compile` to pre-compile `trust` function + to make subsequent calls faster + +0.0.1 / 2014-05-04 +================== + + * Fix bad npm publish + +0.0.0 / 2014-05-04 +================== + + * Initial release diff --git a/node_modules/proxy-addr/LICENSE b/node_modules/proxy-addr/LICENSE new file mode 100644 index 00000000..cab251c2 --- /dev/null +++ b/node_modules/proxy-addr/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014-2016 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/proxy-addr/README.md b/node_modules/proxy-addr/README.md new file mode 100644 index 00000000..8c176ea5 --- /dev/null +++ b/node_modules/proxy-addr/README.md @@ -0,0 +1,155 @@ +# proxy-addr + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Determine address of proxied request + +## Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install proxy-addr +``` + +## API + + + +```js +var proxyaddr = require('proxy-addr') +``` + +### proxyaddr(req, trust) + +Return the address of the request, using the given `trust` parameter. + +The `trust` argument is a function that returns `true` if you trust +the address, `false` if you don't. The closest untrusted address is +returned. + + + +```js +proxyaddr(req, function (addr) { return addr === '127.0.0.1' }) +proxyaddr(req, function (addr, i) { return i < 1 }) +``` + +The `trust` argument may also be a single IP address string or an +array of trusted addresses, as plain IP addresses, CIDR-formatted +strings, or IP/netmask strings. + + + +```js +proxyaddr(req, '127.0.0.1') +proxyaddr(req, ['127.0.0.0/8', '10.0.0.0/8']) +proxyaddr(req, ['127.0.0.0/255.0.0.0', '192.168.0.0/255.255.0.0']) +``` + +This module also supports IPv6. Your IPv6 addresses will be normalized +automatically (i.e. `fe80::00ed:1` equals `fe80:0:0:0:0:0:ed:1`). + + + +```js +proxyaddr(req, '::1') +proxyaddr(req, ['::1/128', 'fe80::/10']) +``` + +This module will automatically work with IPv4-mapped IPv6 addresses +as well to support node.js in IPv6-only mode. This means that you do +not have to specify both `::ffff:a00:1` and `10.0.0.1`. + +As a convenience, this module also takes certain pre-defined names +in addition to IP addresses, which expand into IP addresses: + + + +```js +proxyaddr(req, 'loopback') +proxyaddr(req, ['loopback', 'fc00:ac:1ab5:fff::1/64']) +``` + + * `loopback`: IPv4 and IPv6 loopback addresses (like `::1` and + `127.0.0.1`). + * `linklocal`: IPv4 and IPv6 link-local addresses (like + `fe80::1:1:1:1` and `169.254.0.1`). + * `uniquelocal`: IPv4 private addresses and IPv6 unique-local + addresses (like `fc00:ac:1ab5:fff::1` and `192.168.0.1`). + +When `trust` is specified as a function, it will be called for each +address to determine if it is a trusted address. The function is +given two arguments: `addr` and `i`, where `addr` is a string of +the address to check and `i` is a number that represents the distance +from the socket address. + +### proxyaddr.all(req, [trust]) + +Return all the addresses of the request, optionally stopping at the +first untrusted. This array is ordered from closest to furthest +(i.e. `arr[0] === req.connection.remoteAddress`). + + + +```js +proxyaddr.all(req) +``` + +The optional `trust` argument takes the same arguments as `trust` +does in `proxyaddr(req, trust)`. + + + +```js +proxyaddr.all(req, 'loopback') +``` + +### proxyaddr.compile(val) + +Compiles argument `val` into a `trust` function. This function takes +the same arguments as `trust` does in `proxyaddr(req, trust)` and +returns a function suitable for `proxyaddr(req, trust)`.
+ + + +```js +var trust = proxyaddr.compile('loopback') +var addr = proxyaddr(req, trust) +``` + +This function is meant to be optimized for use against every request. +It is recommend to compile a trust function up-front for the trusted +configuration and pass that to `proxyaddr(req, trust)` for each request. + +## Testing + +```sh +$ npm test +``` + +## Benchmarks + +```sh +$ npm run-script bench +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/proxy-addr/master +[coveralls-url]: https://coveralls.io/r/jshttp/proxy-addr?branch=master +[node-image]: https://badgen.net/npm/node/proxy-addr +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/proxy-addr +[npm-url]: https://npmjs.org/package/proxy-addr +[npm-version-image]: https://badgen.net/npm/v/proxy-addr +[travis-image]: https://badgen.net/travis/jshttp/proxy-addr/master +[travis-url]: https://travis-ci.org/jshttp/proxy-addr diff --git a/node_modules/proxy-addr/index.js b/node_modules/proxy-addr/index.js new file mode 100644 index 00000000..8bb7d336 --- /dev/null +++ b/node_modules/proxy-addr/index.js @@ -0,0 +1,327 @@ +/*! + * proxy-addr + * Copyright(c) 2014-2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = proxyaddr +module.exports.all = alladdrs +module.exports.compile = compile + +/** + * Module dependencies. + * @private + */ + +var forwarded = require('forwarded') +var ipaddr = require('ipaddr.js') + +/** + * Variables. + * @private + */ + +var DIGIT_REGEXP = /^[0-9]+$/ +var isip = ipaddr.isValid +var parseip = ipaddr.parse + +/** + * Pre-defined IP ranges. + * @private + */ + +var IP_RANGES = { + linklocal: ['169.254.0.0/16', 'fe80::/10'], + loopback: ['127.0.0.1/8', '::1/128'], + uniquelocal: ['10.0.0.0/8', '172.16.0.0/12', '192.168.0.0/16', 'fc00::/7'] +} + +/** + * Get all addresses in the request, optionally stopping + * at the first untrusted. + * + * @param {Object} request + * @param {Function|Array|String} [trust] + * @public + */ + +function alladdrs (req, trust) { + // get addresses + var addrs = forwarded(req) + + if (!trust) { + // Return all addresses + return addrs + } + + if (typeof trust !== 'function') { + trust = compile(trust) + } + + for (var i = 0; i < addrs.length - 1; i++) { + if (trust(addrs[i], i)) continue + + addrs.length = i + 1 + } + + return addrs +} + +/** + * Compile argument into trust function. + * + * @param {Array|String} val + * @private + */ + +function compile (val) { + if (!val) { + throw new TypeError('argument is required') + } + + var trust + + if (typeof val === 'string') { + trust = [val] + } else if (Array.isArray(val)) { + trust = val.slice() + } else { + throw new TypeError('unsupported trust argument') + } + + for (var i = 0; i < trust.length; i++) { + val = trust[i] + + if (!IP_RANGES.hasOwnProperty(val)) { + continue + } + + // Splice in pre-defined range + val = IP_RANGES[val] + trust.splice.apply(trust, [i, 1].concat(val)) + i += val.length - 1 + } + + return compileTrust(compileRangeSubnets(trust)) +} + +/** + * Compile `arr` elements into range subnets. + * + * @param {Array} arr + * @private + */ + +function compileRangeSubnets (arr) { + var rangeSubnets = new Array(arr.length) + + for (var i = 0; i < arr.length; i++) { + rangeSubnets[i] = parseipNotation(arr[i]) + } + + return rangeSubnets +} + +/** + * Compile range subnet array into trust function. 
+ * + * @param {Array} rangeSubnets + * @private + */ + +function compileTrust (rangeSubnets) { + // Return optimized function based on length + var len = rangeSubnets.length + return len === 0 + ? trustNone + : len === 1 + ? trustSingle(rangeSubnets[0]) + : trustMulti(rangeSubnets) +} + +/** + * Parse IP notation string into range subnet. + * + * @param {String} note + * @private + */ + +function parseipNotation (note) { + var pos = note.lastIndexOf('/') + var str = pos !== -1 + ? note.substring(0, pos) + : note + + if (!isip(str)) { + throw new TypeError('invalid IP address: ' + str) + } + + var ip = parseip(str) + + if (pos === -1 && ip.kind() === 'ipv6' && ip.isIPv4MappedAddress()) { + // Store as IPv4 + ip = ip.toIPv4Address() + } + + var max = ip.kind() === 'ipv6' + ? 128 + : 32 + + var range = pos !== -1 + ? note.substring(pos + 1, note.length) + : null + + if (range === null) { + range = max + } else if (DIGIT_REGEXP.test(range)) { + range = parseInt(range, 10) + } else if (ip.kind() === 'ipv4' && isip(range)) { + range = parseNetmask(range) + } else { + range = null + } + + if (range <= 0 || range > max) { + throw new TypeError('invalid range on address: ' + note) + } + + return [ip, range] +} + +/** + * Parse netmask string into CIDR range. + * + * @param {String} netmask + * @private + */ + +function parseNetmask (netmask) { + var ip = parseip(netmask) + var kind = ip.kind() + + return kind === 'ipv4' + ? ip.prefixLengthFromSubnetMask() + : null +} + +/** + * Determine address of proxied request. + * + * @param {Object} request + * @param {Function|Array|String} trust + * @public + */ + +function proxyaddr (req, trust) { + if (!req) { + throw new TypeError('req argument is required') + } + + if (!trust) { + throw new TypeError('trust argument is required') + } + + var addrs = alladdrs(req, trust) + var addr = addrs[addrs.length - 1] + + return addr +} + +/** + * Static trust function to trust nothing. + * + * @private + */ + +function trustNone () { + return false +} + +/** + * Compile trust function for multiple subnets. + * + * @param {Array} subnets + * @private + */ + +function trustMulti (subnets) { + return function trust (addr) { + if (!isip(addr)) return false + + var ip = parseip(addr) + var ipconv + var kind = ip.kind() + + for (var i = 0; i < subnets.length; i++) { + var subnet = subnets[i] + var subnetip = subnet[0] + var subnetkind = subnetip.kind() + var subnetrange = subnet[1] + var trusted = ip + + if (kind !== subnetkind) { + if (subnetkind === 'ipv4' && !ip.isIPv4MappedAddress()) { + // Incompatible IP addresses + continue + } + + if (!ipconv) { + // Convert IP to match subnet IP kind + ipconv = subnetkind === 'ipv4' + ? ip.toIPv4Address() + : ip.toIPv4MappedAddress() + } + + trusted = ipconv + } + + if (trusted.match(subnetip, subnetrange)) { + return true + } + } + + return false + } +} + +/** + * Compile trust function for single subnet. + * + * @param {Object} subnet + * @private + */ + +function trustSingle (subnet) { + var subnetip = subnet[0] + var subnetkind = subnetip.kind() + var subnetisipv4 = subnetkind === 'ipv4' + var subnetrange = subnet[1] + + return function trust (addr) { + if (!isip(addr)) return false + + var ip = parseip(addr) + var kind = ip.kind() + + if (kind !== subnetkind) { + if (subnetisipv4 && !ip.isIPv4MappedAddress()) { + // Incompatible IP addresses + return false + } + + // Convert IP to match subnet IP kind + ip = subnetisipv4 + ? 
ip.toIPv4Address() + : ip.toIPv4MappedAddress() + } + + return ip.match(subnetip, subnetrange) + } +} diff --git a/node_modules/proxy-addr/package.json b/node_modules/proxy-addr/package.json new file mode 100644 index 00000000..f0f6139e --- /dev/null +++ b/node_modules/proxy-addr/package.json @@ -0,0 +1,47 @@ +{ + "name": "proxy-addr", + "description": "Determine address of proxied request", + "version": "2.0.5", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "keywords": [ + "ip", + "proxy", + "x-forwarded-for" + ], + "repository": "jshttp/proxy-addr", + "dependencies": { + "forwarded": "~0.1.2", + "ipaddr.js": "1.9.0" + }, + "devDependencies": { + "benchmark": "2.1.4", + "beautify-benchmark": "0.2.4", + "deep-equal": "1.0.1", + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.17.1", + "eslint-plugin-markdown": "1.0.0", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "mocha": "6.1.3", + "nyc": "13.3.0" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.10" + }, + "scripts": { + "bench": "node benchmark/index.js", + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "nyc --reporter=text npm test", + "test-travis": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/node_modules/pseudomap/LICENSE b/node_modules/pseudomap/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/pseudomap/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pseudomap/README.md b/node_modules/pseudomap/README.md new file mode 100644 index 00000000..778bf01d --- /dev/null +++ b/node_modules/pseudomap/README.md @@ -0,0 +1,60 @@ +# pseudomap + +A thing that is a lot like ES6 `Map`, but without iterators, for use +in environments where `for..of` syntax and `Map` are not available. + +If you need iterators, or just in general a more faithful polyfill to +ES6 Maps, check out [es6-map](http://npm.im/es6-map). + +If you are in an environment where `Map` is supported, then that will +be returned instead, unless `process.env.TEST_PSEUDOMAP` is set. + +You can use any value as keys, and any value as data. Setting again +with the identical key will overwrite the previous value. + +Internally, data is stored on an `Object.create(null)` style object. +The key is coerced to a string to generate the key on the internal +data-bag object. The original key used is stored along with the data. + +In the event of a stringified-key collision, a new key is generated by +appending an increasing number to the stringified-key until finding +either the intended key or an empty spot. 
+ +Note that because object traversal order of plain objects is not +guaranteed to be identical to insertion order, the insertion order +guarantee of `Map.prototype.forEach` is not guaranteed in this +implementation. However, in all versions of Node.js and V8 where this +module works, `forEach` does traverse data in insertion order. + +## API + +Most of the [Map +API](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map), +with the following exceptions: + +1. A `Map` object is not an iterator. +2. `values`, `keys`, and `entries` methods are not implemented, + because they return iterators. +3. The argument to the constructor can be an Array of `[key, value]` + pairs, or a `Map` or `PseudoMap` object. But, since iterators + aren't used, passing any plain-old iterator won't initialize the + map properly. + +## USAGE + +Use just like a regular ES6 Map. + +```javascript +var PseudoMap = require('pseudomap') + +// optionally provide a pseudomap, or an array of [key,value] pairs +// as the argument to initialize the map with +var myMap = new PseudoMap() + +myMap.set(1, 'number 1') +myMap.set('1', 'string 1') +var akey = {} +var bkey = {} +myMap.set(akey, { some: 'data' }) +myMap.set(bkey, { some: 'other data' }) +``` diff --git a/node_modules/pseudomap/map.js b/node_modules/pseudomap/map.js new file mode 100644 index 00000000..7db15994 --- /dev/null +++ b/node_modules/pseudomap/map.js @@ -0,0 +1,9 @@ +if (process.env.npm_package_name === 'pseudomap' && + process.env.npm_lifecycle_script === 'test') + process.env.TEST_PSEUDOMAP = 'true' + +if (typeof Map === 'function' && !process.env.TEST_PSEUDOMAP) { + module.exports = Map +} else { + module.exports = require('./pseudomap') +} diff --git a/node_modules/pseudomap/package.json b/node_modules/pseudomap/package.json new file mode 100644 index 00000000..4b02ab7c --- /dev/null +++ b/node_modules/pseudomap/package.json @@ -0,0 +1,25 @@ +{ + "name": "pseudomap", + "version": "1.0.2", + "description": "A thing that is a lot like ES6 `Map`, but without iterators, for use in environments where `for..of` syntax and `Map` are not available.", + "main": "map.js", + "directories": { + "test": "test" + }, + "devDependencies": { + "tap": "^2.3.1" + }, + "scripts": { + "test": "tap test/*.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/pseudomap.git" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "bugs": { + "url": "https://github.com/isaacs/pseudomap/issues" + }, + "homepage": "https://github.com/isaacs/pseudomap#readme" +} diff --git a/node_modules/pseudomap/pseudomap.js b/node_modules/pseudomap/pseudomap.js new file mode 100644 index 00000000..25a21d82 --- /dev/null +++ b/node_modules/pseudomap/pseudomap.js @@ -0,0 +1,113 @@ +var hasOwnProperty = Object.prototype.hasOwnProperty + +module.exports = PseudoMap + +function PseudoMap (set) { + if (!(this instanceof PseudoMap)) // whyyyyyyy + throw new TypeError("Constructor PseudoMap requires 'new'") + + this.clear() + + if (set) { + if ((set instanceof PseudoMap) || + (typeof Map === 'function' && set instanceof Map)) + set.forEach(function (value, key) { + this.set(key, value) + }, this) + else if (Array.isArray(set)) + set.forEach(function (kv) { + this.set(kv[0], kv[1]) + }, this) + else + throw new TypeError('invalid argument') + } +} + +PseudoMap.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + Object.keys(this._data).forEach(function (k) { + if (k !== 'size') + fn.call(thisp, this._data[k].value, this._data[k].key) + }, this) +} + +PseudoMap.prototype.has = function (k) { + return !!find(this._data, k) +} + +PseudoMap.prototype.get = function (k) { + var res = find(this._data, k) + return res && res.value +} + +PseudoMap.prototype.set = function (k, v) { + set(this._data, k, v) +} + +PseudoMap.prototype.delete = function (k) { + var res = find(this._data, k) + if (res) { + delete this._data[res._index] + this._data.size-- + } +} + +PseudoMap.prototype.clear = function () { + var data = Object.create(null) + data.size = 0 + + Object.defineProperty(this, '_data', { + value: data, + enumerable: false, + configurable: true, + writable: false + }) +} + +Object.defineProperty(PseudoMap.prototype, 'size', { + get: function () { + return this._data.size + }, + set: function (n) {}, + enumerable: true, + configurable: true +}) + +PseudoMap.prototype.values = +PseudoMap.prototype.keys = +PseudoMap.prototype.entries = function () { + throw new Error('iterators are not implemented in this version') +} + +// Either identical, or both NaN +function same (a, b) { + return a === b || a !== a && b !== b +} + +function Entry (k, v, i) { + this.key = k + this.value = v + this._index = i +} + +function find (data, k) { + for (var i = 0, s = '_' + k, key = s; + hasOwnProperty.call(data, key); + key = s + i++) { + if (same(data[key].key, k)) + return data[key] + } +} + +function set (data, k, v) { + for (var i = 0, s = '_' + k, key = s; + hasOwnProperty.call(data, key); + key = s + i++) { + if (same(data[key].key, k)) { + data[key].value = v + return + } + } + data.size++ + data[key] = new Entry(k, v, key) +} diff --git a/node_modules/pseudomap/test/basic.js b/node_modules/pseudomap/test/basic.js new file mode 100644 index 00000000..4378e454 --- /dev/null +++ b/node_modules/pseudomap/test/basic.js @@ -0,0 +1,86 @@ +var t = require('tap') + +process.env.TEST_PSEUDOMAP = 'true' + +var PM = require('../') +runTests(PM) + +// if possible, verify that Map also behaves the same way +if (typeof Map === 'function') + runTests(Map) + + +function runTests (Map) { + t.throws(Map) + + var m = new Map() + + t.equal(m.size, 0) + + m.set(1, '1 string') + t.equal(m.get(1), '1 string') + t.equal(m.size, 1) + m.size = 1000 + t.equal(m.size, 1) + m.size = 0 + t.equal(m.size, 1) + + m = new Map([[1, 'number 1'], ['1', 'string 1']]) + t.equal(m.get(1), 'number 1') + t.equal(m.get('1'), 'string 
1') + t.equal(m.size, 2) + + m = new Map(m) + t.equal(m.get(1), 'number 1') + t.equal(m.get('1'), 'string 1') + t.equal(m.size, 2) + + var akey = {} + var bkey = {} + m.set(akey, { some: 'data' }) + m.set(bkey, { some: 'other data' }) + t.same(m.get(akey), { some: 'data' }) + t.same(m.get(bkey), { some: 'other data' }) + t.equal(m.size, 4) + + var x = /x/ + var y = /x/ + m.set(x, 'x regex') + m.set(y, 'y regex') + t.equal(m.get(x), 'x regex') + m.set(x, 'x again') + t.equal(m.get(x), 'x again') + t.equal(m.size, 6) + + m.set(NaN, 'not a number') + t.equal(m.get(NaN), 'not a number') + m.set(NaN, 'it is a ' + typeof NaN) + t.equal(m.get(NaN), 'it is a number') + m.set('NaN', 'stringie nan') + t.equal(m.get(NaN), 'it is a number') + t.equal(m.get('NaN'), 'stringie nan') + t.equal(m.size, 8) + + m.delete(NaN) + t.equal(m.get(NaN), undefined) + t.equal(m.size, 7) + + var expect = [ + { value: 'number 1', key: 1 }, + { value: 'string 1', key: '1' }, + { value: { some: 'data' }, key: {} }, + { value: { some: 'other data' }, key: {} }, + { value: 'x again', key: /x/ }, + { value: 'y regex', key: /x/ }, + { value: 'stringie nan', key: 'NaN' } + ] + var actual = [] + + m.forEach(function (value, key) { + actual.push({ value: value, key: key }) + }) + t.same(actual, expect) + + m.clear() + t.equal(m.size, 0) +} diff --git a/node_modules/pstree.remy/.travis.yml b/node_modules/pstree.remy/.travis.yml new file mode 100644 index 00000000..5bf093ee --- /dev/null +++ b/node_modules/pstree.remy/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +cache: + directories: + - ~/.npm +notifications: + email: false +node_js: + - '8' diff --git a/node_modules/pstree.remy/LICENSE b/node_modules/pstree.remy/LICENSE new file mode 100644 index 00000000..e83bea65 --- /dev/null +++ b/node_modules/pstree.remy/LICENSE @@ -0,0 +1,7 @@ +The MIT License (MIT) +Copyright © 2019 Remy Sharp, https://remysharp.com +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
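The `pstree.remy` files added below export a single function that takes a PID and a Node-style callback. A minimal usage sketch, based on the `lib/index.js` and tests that follow (the printed PID values are illustrative):

```js
// Sketch based on lib/index.js below: pstree(pid, cb) yields the PIDs of the
// processes spawned under `pid`, using `ps` when available and /proc otherwise.
const pstree = require('pstree.remy');

pstree(process.pid, (err, pids) => {
  if (err) throw err;
  console.log(pids); // e.g. [ 4080 ] (PIDs of child processes; values illustrative)
});
```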
diff --git a/node_modules/pstree.remy/lib/index.js b/node_modules/pstree.remy/lib/index.js new file mode 100644 index 00000000..6ea7e023 --- /dev/null +++ b/node_modules/pstree.remy/lib/index.js @@ -0,0 +1,32 @@ +const exec = require('child_process').exec; +const tree = require('./tree'); +const utils = require('./utils'); +var hasPS = true; + +// discover if the OS has `ps`, and therefore can use psTree +exec('ps', error => { + module.exports.hasPS = hasPS = !error; +}); + +module.exports = function main(pid, callback) { + if (typeof pid === 'number') { + pid = pid.toString(); + } + + if (hasPS && !process.env.NO_PS) { + return tree(pid, callback); + } + + utils + .getStat() + .then(utils.tree) + .then(tree => utils.pidsForTree(tree, pid)) + .then(res => callback(null, res.map(p => p.PID))) + .catch(error => callback(error)); +}; + +if (!module.parent) { + module.exports(process.argv[2], (e, pids) => console.log(pids)); +} + +module.exports.hasPS = hasPS; diff --git a/node_modules/pstree.remy/lib/tree.js b/node_modules/pstree.remy/lib/tree.js new file mode 100644 index 00000000..d8c49f7d --- /dev/null +++ b/node_modules/pstree.remy/lib/tree.js @@ -0,0 +1,34 @@ +const spawn = require('child_process').spawn; + +module.exports = function(rootPid, callback) { + const tree = {}; + var output = ''; + + // *nix + const ps = spawn('ps', ['-A', '-o', 'ppid,pid']); + ps.stdout.on('data', data => { + output += data.toString('ascii'); + }); + + ps.on('close', () => { + try { + const res = output + .split('\n') + .slice(1) + .map(_ => _.trim()) + .reduce((acc, line) => { + if (line.indexOf(rootPid + ' ') === 0) { + const pid = line.split(/\s+/).pop(); + acc.push(parseInt(pid, 10)); + rootPid = pid; + } + + return acc; + }, []); + + callback(null, res); + } catch (e) { + callback(e, null); + } + }); +}; diff --git a/node_modules/pstree.remy/lib/utils.js b/node_modules/pstree.remy/lib/utils.js new file mode 100644 index 00000000..e438af91 --- /dev/null +++ b/node_modules/pstree.remy/lib/utils.js @@ -0,0 +1,56 @@ +const spawn = require('child_process').spawn; + +module.exports = { tree, pidsForTree, getStat }; + +function getStat() { + return new Promise(resolve => { + const command = `ls /proc | grep -E '^[0-9]+$' | xargs -I{} cat /proc/{}/stat`; + const child = spawn('sh', ['-c', command], { + stdio: ['pipe', 'pipe', 'pipe'], + }); + + var res = ''; + child.stdout.on('data', data => (res += data)); + child.on('close', () => resolve(res)); + }); +} + +function template(s) { + var stat = null; + // 'pid', 'comm', 'state', 'ppid', 'pgrp' + // %d (%s) %c %d %d + s.replace( + /(\d+) \((.*?)\)\s(.+?)\s(\d+)\s/g, + (all, PID, COMMAND, STAT, PPID) => { + stat = { PID, COMMAND, PPID, STAT }; + } + ); + + return stat; +} + +function tree(stats) { + const processes = stats + .split('\n') + .map(template) + .filter(Boolean); + + return processes; +} + +function pidsForTree(tree, pid) { + if (typeof pid === 'number') { + pid = pid.toString(); + } + const parents = [pid]; + const children = []; + + tree.forEach(proc => { + if (parents.indexOf(proc.PPID) !== -1) { + parents.push(proc.PID); + children.push(proc); + } + }); + + return children; +} diff --git a/node_modules/pstree.remy/package.json b/node_modules/pstree.remy/package.json new file mode 100644 index 00000000..f7fed76e --- /dev/null +++ b/node_modules/pstree.remy/package.json @@ -0,0 +1,24 @@ +{ + "name": "pstree.remy", + "version": "1.1.7", + "main": "lib/index.js", + "scripts": { + "test": "tap tests/*.test.js", + "_prepublish": "npm test" + }, + 
"keywords": [ + "ps", + "pstree", + "ps tree" + ], + "author": "Remy Sharp", + "license": "MIT", + "devDependencies": { + "tap": "^11.0.0" + }, + "directories": { + "test": "tests" + }, + "dependencies": {}, + "description": "Collects the full tree of processes from /proc" +} diff --git a/node_modules/pstree.remy/tests/fixtures/index.js b/node_modules/pstree.remy/tests/fixtures/index.js new file mode 100644 index 00000000..4c171f95 --- /dev/null +++ b/node_modules/pstree.remy/tests/fixtures/index.js @@ -0,0 +1,8 @@ +const spawn = require('child_process').spawn; +const sub = spawn( + 'sh', + ['-c', 'node -e "setInterval(() => console.log(`running`), 200)"'], + { + stdio: 'pipe', + } +); diff --git a/node_modules/pstree.remy/tests/fixtures/out1 b/node_modules/pstree.remy/tests/fixtures/out1 new file mode 100644 index 00000000..abfe5810 --- /dev/null +++ b/node_modules/pstree.remy/tests/fixtures/out1 @@ -0,0 +1,10 @@ +1 (npm) S 0 1 1 34816 1 4210944 11112 0 0 0 45 8 0 0 20 0 10 0 330296 1089871872 11809 18446744073709551615 4194304 29343848 140726436642896 0 0 0 0 4096 2072112895 0 0 0 17 0 0 0 0 0 0 31441000 31537208 37314560 140726436650815 140726436650847 140726436650847 140726436650986 0 +15 (sh) S 1 1 1 34816 1 4210688 115 0 0 0 0 0 0 0 20 0 1 0 330372 4399104 187 18446744073709551615 94374393548800 94374393655428 140722913272992 0 0 0 0 0 65538 0 0 0 17 0 0 0 0 0 0 94374395756424 94374395761184 94374404673536 140722913278928 140722913278959 140722913278959 140722913284080 0 +16 (node) S 15 1 1 34816 1 4210688 6930 103 0 0 32 2 0 0 20 0 10 0 330373 1068478464 8412 18446744073709551615 4194304 29343848 140727228046064 0 0 0 0 4096 134300162 0 0 0 17 1 0 0 1 0 0 31441000 31537208 52584448 140727228050313 140727228050383 140727228050383 140727228055530 0 +27 (sh) S 16 1 1 34816 1 4210688 111 0 0 0 0 0 0 0 20 0 1 0 330410 4399104 193 18446744073709551615 94848235986944 94848236093572 140727019991184 0 0 0 0 0 65538 0 0 0 17 1 0 0 0 0 0 94848238194568 94848238199328 94848261660672 140727019998122 140727019998165 140727019998165 140727020003312 0 +28 (node) S 27 1 1 34816 1 4210688 3576 268 0 0 12 2 0 0 20 0 10 0 330411 930213888 6760 18446744073709551615 4194304 29343848 140726559664992 0 0 0 0 4096 134300162 0 0 0 17 1 0 0 0 0 0 31441000 31537208 32591872 140726559669117 140726559669199 140726559669199 140726559674346 0 +39 (node) S 28 1 1 34816 1 4210688 47517 0 0 0 151 9 0 0 20 0 6 0 330427 985739264 31859 18446744073709551615 4194304 29343848 140737324503920 0 0 0 0 4096 134234626 0 0 0 17 0 0 0 0 0 0 31441000 31537208 51585024 140737324510060 140737324510159 140737324510159 140737324515306 0 +45 (bash) S 0 45 45 34817 50 4210944 752 256 0 0 2 0 0 0 20 0 1 0 331039 18628608 789 18446744073709551615 4194304 5242124 140724425887696 0 0 0 65536 3670020 1266777851 0 0 0 17 1 0 0 0 0 0 7341384 7388228 30310400 140724425891678 140724425891683 140724425891683 140724425891822 0 +cat: /proc/50/stat: No such file or directory +cat: /proc/51/stat: No such file or directory +52 (xargs) S 45 50 45 34817 50 4210688 179 661 0 0 0 0 0 0 20 0 1 0 331544 4608000 346 18446744073709551615 94587588550656 94587588614028 140735223856048 0 0 0 0 0 2560 0 0 0 17 1 0 0 0 0 0 94587590711464 94587590713504 94587603169280 140735223861006 140735223861035 140735223861035 140735223861225 0 diff --git a/node_modules/pstree.remy/tests/fixtures/out2 b/node_modules/pstree.remy/tests/fixtures/out2 new file mode 100644 index 00000000..3b31137d --- /dev/null +++ b/node_modules/pstree.remy/tests/fixtures/out2 @@ -0,0 +1,29 @@ 
+cat: /proc/4087/stat: No such file or directory +cat: /proc/4088/stat: No such file or directory +1 (init) S 0 1 1 0 -1 4210944 9227 55994 29 319 7 5 68 16 20 0 1 0 1286281 33660928 855 18446744073709551615 1 1 0 0 0 0 0 4096 536962595 0 0 0 17 4 0 0 3 0 0 0 0 0 0 0 0 0 0 +1032 (ntpd) S 1 1032 1032 0 -1 4211008 178 0 1 0 0 0 0 0 20 0 1 0 1287033 25743360 1058 18446744073709551615 1 1 0 0 0 0 0 4096 27207 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +126 (irqbalance) S 1 126 126 0 -1 1077952832 1217 0 0 0 1 6 0 0 20 0 1 0 1286749 20189184 647 18446744073709551615 1 1 0 0 0 0 0 0 3 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +181 (mysqld) S 1 181 181 0 -1 4210944 6399 0 46 0 8 6 0 0 20 0 22 0 1286761 748453888 14476 18446744073709551615 1 1 0 0 0 0 552967 4096 26345 0 0 0 17 4 0 0 10 0 0 0 0 0 0 0 0 0 0 +194 (memcached) S 1 187 187 0 -1 4210944 252 0 4 0 0 0 0 0 20 0 6 0 1286766 333221888 648 18446744073709551615 1 1 0 0 0 0 0 4096 2 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +243 (dbus-daemon) S 1 243 243 0 -1 4211008 67 0 0 0 0 0 0 0 20 0 1 0 1286779 40087552 598 18446744073709551615 1 1 0 0 0 0 0 0 16385 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +254 (rsyslogd) S 1 254 254 0 -1 4211008 107 0 0 0 2 2 0 0 20 0 3 0 1286782 186601472 696 18446744073709551615 1 1 0 0 0 0 0 16781830 1133601 0 0 0 17 5 0 0 0 0 0 0 0 0 0 0 0 0 0 +265 (systemd-logind) S 1 265 265 0 -1 4210944 276 0 2 0 0 0 0 0 20 0 1 0 1286786 35880960 720 18446744073709551615 1 1 0 0 0 0 0 0 0 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +333 (postgres) S 1 303 303 0 -1 4210688 3169 3466 15 18 0 1 1 1 20 0 1 0 1286817 156073984 5002 18446744073709551615 1 1 0 0 0 0 0 19935232 84487 0 0 0 17 5 0 0 1 0 0 0 0 0 0 0 0 0 0 +359 (postgres) S 333 359 359 0 -1 4210752 90 0 0 0 0 0 0 0 20 0 1 0 1286822 156073984 827 18446744073709551615 1 1 0 0 0 0 0 16805888 2567 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +360 (postgres) S 333 360 360 0 -1 4210752 119 0 0 0 0 0 0 0 20 0 1 0 1286822 156073984 827 18446744073709551615 1 1 0 0 0 0 0 16791554 16901 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +361 (postgres) S 333 361 361 0 -1 4210752 87 0 0 0 0 0 0 0 20 0 1 0 1286822 156073984 827 18446744073709551615 1 1 0 0 0 0 0 16791552 16903 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +362 (postgres) S 333 362 362 0 -1 4210752 292 0 3 0 0 0 0 0 20 0 1 0 1286822 156930048 1373 18446744073709551615 1 1 0 0 0 0 0 19927040 27271 0 0 0 17 5 0 0 0 0 0 0 0 0 0 0 0 0 0 +363 (postgres) S 333 363 363 0 -1 4210752 82 0 0 0 0 0 0 0 20 0 1 0 1286822 115924992 887 18446744073709551615 1 1 0 0 0 0 0 16808450 5 0 0 0 17 5 0 0 0 0 0 0 0 0 0 0 0 0 0 +4050 (npm) S 50 50 50 34817 50 4210688 5109 0 0 0 36 3 0 0 20 0 10 0 1292968 738025472 10051 18446744073709551615 4194304 33165900 140723623956256 0 0 0 0 4096 134300162 0 0 0 17 4 0 0 0 0 0 35263056 35370992 48369664 140723623964237 140723623964294 140723623964294 140723623968712 0 +4060 (sh) S 4050 50 50 34817 50 4210688 121 0 0 0 0 0 0 0 20 0 1 0 1293007 4579328 174 18446744073709551615 94347643936768 94347644049516 140735136055088 0 0 0 0 0 65538 1 0 0 17 5 0 0 0 0 0 94347646148008 94347646153216 94347660038144 140735136063095 140735136063129 140735136063129 140735136071664 0 +4061 (node) S 4060 50 50 34817 50 4210688 6501 0 0 0 42 2 0 0 20 0 6 0 1293008 705769472 10211 18446744073709551615 4194304 33165900 140730532686288 0 0 0 0 4096 2072111671 0 0 0 17 5 0 0 0 0 0 35263056 35370992 45867008 140730532695579 140730532695657 140730532695657 140730532704200 0 +4067 (node) S 4061 50 50 34817 50 4210688 6746 221 0 0 38 3 0 0 20 0 10 0 1293051 738910208 10527 
18446744073709551615 4194304 33165900 140724824971632 0 0 0 0 4096 2072111671 0 0 0 17 4 0 0 0 0 0 35263056 35370992 68595712 140724824980995 140724824981063 140724824981063 140724824989640 0 +4079 (sh) S 4067 50 50 34817 50 4210688 118 0 0 0 0 0 0 0 20 0 1 0 1293092 4579328 194 18446744073709551615 94573702131712 94573702244460 140724712357120 0 0 0 0 0 65538 1 0 0 17 4 0 0 0 0 0 94573704342952 94573704348160 94573718511616 140724712361487 140724712361583 140724712361583 140724712370160 0 +4080 (node) S 4079 50 50 34817 50 4210688 2428 0 0 0 8 1 0 0 20 0 6 0 1293093 693059584 7251 18446744073709551615 4194304 33165900 140726023392816 0 0 0 0 4096 134234626 0 0 0 17 5 0 0 0 0 0 35263056 35370992 55226368 140726023396847 140726023396935 140726023396935 140726023405512 0 +4086 (sh) S 4067 50 50 34817 50 4210688 131 244 0 0 0 0 0 0 20 0 1 0 1293143 4579328 200 18446744073709551615 94347550273536 94347550386284 140737219399136 0 0 0 0 0 65538 1 0 0 17 5 0 0 0 0 0 94347552484776 94347552489984 94347554299904 140737219403308 140737219403375 140737219403375 140737219411952 0 +4089 (xargs) S 4086 50 50 34817 50 4210688 333 1924 0 0 0 0 0 0 20 0 1 0 1293143 17600512 477 18446744073709551615 4194304 4232732 140721633759248 0 0 0 0 0 0 1 0 0 17 5 0 0 0 0 0 6331920 6332980 32182272 140721633762891 140721633762920 140721633762920 140721633771497 0 +50 (bash) S 0 50 50 34817 50 4210944 43914 1032463 9 705 44 21 4213 818 20 0 1 0 1286336 42266624 3599 18446744073709551615 4194304 5173404 140732749083280 0 0 0 65536 4 1132560123 1 0 0 17 4 0 0 410 0 0 7273968 7310504 21196800 140732749086490 140732749086517 140732749086517 140732749086702 0 +79 (acpid) S 1 79 79 0 -1 4210752 46 0 0 0 0 0 0 0 20 0 1 0 1286717 4493312 407 18446744073709551615 1 1 0 0 0 0 0 4096 16391 0 0 0 17 5 0 0 0 0 0 0 0 0 0 0 0 0 0 +83 (sshd) S 1 83 83 0 -1 4210944 354 0 27 0 0 0 0 0 20 0 1 0 1286718 62873600 1290 18446744073709551615 1 1 0 0 0 0 0 4096 81925 0 0 0 17 4 0 0 30 0 0 0 0 0 0 0 0 0 0 +94 (cron) S 1 94 94 0 -1 1077952576 103 449 0 1 0 0 0 0 20 0 1 0 1286743 24240128 559 18446744073709551615 1 1 0 0 0 0 0 0 65537 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 +95 (atd) S 1 95 95 0 -1 1077952576 28 0 0 0 0 0 0 0 20 0 1 0 1286743 19615744 41 18446744073709551615 1 1 0 0 0 0 0 0 81923 0 0 0 17 4 0 0 0 0 0 0 0 0 0 0 0 0 0 diff --git a/node_modules/pstree.remy/tests/index.test.js b/node_modules/pstree.remy/tests/index.test.js new file mode 100644 index 00000000..0bc3053f --- /dev/null +++ b/node_modules/pstree.remy/tests/index.test.js @@ -0,0 +1,40 @@ +const tap = require('tap'); +const test = tap.test; +const readFile = require('fs').readFileSync; +const spawn = require('child_process').spawn; +const pstree = require('../'); +const { tree, pidsForTree, getStat } = require('../lib/utils'); + +if (process.platform !== 'darwin') { + test('reads from /proc', async t => { + const ps = await getStat(); + t.ok(ps.split('\n').length > 1); + }); +} + +test('tree for live env', async t => { + const pid = 4079; + const fixture = readFile(__dirname + '/fixtures/out2', 'utf8'); + const ps = await tree(fixture); + t.deepEqual(pidsForTree(ps, pid).map(_ => _.PID), ['4080']); +}); + +test('can read full child process tree', t => { + const sub = spawn('node', [`${__dirname}/fixtures/index.js`], { + stdio: 'pipe', + }); + setTimeout(() => { + const pid = sub.pid; + + pstree(pid, (error, children) => { + children.concat([pid]).forEach(p => { + spawn('kill', ['-s', 'SIGTERM', p]); + }); + + console.log(children); + + t.equal(children.length, 2); + t.end(); + 
}); + }, 1000); +}); diff --git a/node_modules/qs/.editorconfig b/node_modules/qs/.editorconfig new file mode 100644 index 00000000..a4893ddf --- /dev/null +++ b/node_modules/qs/.editorconfig @@ -0,0 +1,30 @@ +root = true + +[*] +indent_style = space +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = 160 + +[test/*] +max_line_length = off + +[*.md] +max_line_length = off + +[*.json] +max_line_length = off + +[Makefile] +max_line_length = off + +[CHANGELOG.md] +indent_style = space +indent_size = 2 + +[LICENSE] +indent_size = 2 +max_line_length = off diff --git a/node_modules/qs/.eslintignore b/node_modules/qs/.eslintignore new file mode 100644 index 00000000..1521c8b7 --- /dev/null +++ b/node_modules/qs/.eslintignore @@ -0,0 +1 @@ +dist diff --git a/node_modules/qs/.eslintrc b/node_modules/qs/.eslintrc new file mode 100644 index 00000000..e3bde898 --- /dev/null +++ b/node_modules/qs/.eslintrc @@ -0,0 +1,21 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "complexity": 0, + "consistent-return": 1, + "func-name-matching": 0, + "id-length": [2, { "min": 1, "max": 25, "properties": "never" }], + "indent": [2, 4], + "max-lines-per-function": [2, { "max": 150 }], + "max-params": [2, 14], + "max-statements": [2, 52], + "multiline-comment-style": 0, + "no-continue": 1, + "no-magic-numbers": 0, + "no-restricted-syntax": [2, "BreakStatement", "DebuggerStatement", "ForInStatement", "LabeledStatement", "WithStatement"], + "operator-linebreak": [2, "before"], + } +} diff --git a/node_modules/qs/CHANGELOG.md b/node_modules/qs/CHANGELOG.md new file mode 100644 index 00000000..50505c46 --- /dev/null +++ b/node_modules/qs/CHANGELOG.md @@ -0,0 +1,256 @@ +## **6.7.0** +- [New] `stringify`/`parse`: add `comma` as an `arrayFormat` option (#276, #219) +- [Fix] correctly parse nested arrays (#212) +- [Fix] `utils.merge`: avoid a crash with a null target and a truthy non-array source, also with an array source +- [Robustness] `stringify`: cache `Object.prototype.hasOwnProperty` +- [Refactor] `utils`: `isBuffer`: small tweak; add tests +- [Refactor] use cached `Array.isArray` +- [Refactor] `parse`/`stringify`: make a function to normalize the options +- [Refactor] `utils`: reduce observable [[Get]]s +- [Refactor] `stringify`/`utils`: cache `Array.isArray` +- [Tests] always use `String(x)` over `x.toString()` +- [Tests] fix Buffer tests to work in node < 4.5 and node < 5.10 +- [Tests] temporarily allow coverage to fail + +## **6.6.0** +- [New] Add support for iso-8859-1, utf8 "sentinel" and numeric entities (#268) +- [New] move two-value combine to a `utils` function (#189) +- [Fix] `stringify`: fix a crash with `strictNullHandling` and a custom `filter`/`serializeDate` (#279) +- [Fix] when `parseArrays` is false, properly handle keys ending in `[]` (#260) +- [Fix] `stringify`: do not crash in an obscure combo of `interpretNumericEntities`, a bad custom `decoder`, & `iso-8859-1` +- [Fix] `utils`: `merge`: fix crash when `source` is a truthy primitive & no options are provided +- [refactor] `stringify`: Avoid arr = arr.concat(...), push to the existing instance (#269) +- [Refactor] `parse`: only need to reassign the var once +- [Refactor] `parse`/`stringify`: clean up `charset` options checking; fix defaults +- [Refactor] add missing defaults +- [Refactor] `parse`: one less `concat` call +- [Refactor] `utils`: `compactQueue`: make it explicitly side-effecting +- [Dev Deps] update `browserify`, `eslint`, 
`@ljharb/eslint-config`, `iconv-lite`, `safe-publish-latest`, `tape` +- [Tests] up to `node` `v10.10`, `v9.11`, `v8.12`, `v6.14`, `v4.9`; pin included builds to LTS + +## **6.5.2** +- [Fix] use `safer-buffer` instead of `Buffer` constructor +- [Refactor] utils: `module.exports` one thing, instead of mutating `exports` (#230) +- [Dev Deps] update `browserify`, `eslint`, `iconv-lite`, `safer-buffer`, `tape`, `browserify` + +## **6.5.1** +- [Fix] Fix parsing & compacting very deep objects (#224) +- [Refactor] name utils functions +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` +- [Tests] up to `node` `v8.4`; use `nvm install-latest-npm` so newer npm doesn’t break older node +- [Tests] Use precise dist for Node.js 0.6 runtime (#225) +- [Tests] make 0.6 required, now that it’s passing +- [Tests] on `node` `v8.2`; fix npm on node 0.6 + +## **6.5.0** +- [New] add `utils.assign` +- [New] pass default encoder/decoder to custom encoder/decoder functions (#206) +- [New] `parse`/`stringify`: add `ignoreQueryPrefix`/`addQueryPrefix` options, respectively (#213) +- [Fix] Handle stringifying empty objects with addQueryPrefix (#217) +- [Fix] do not mutate `options` argument (#207) +- [Refactor] `parse`: cache index to reuse in else statement (#182) +- [Docs] add various badges to readme (#208) +- [Dev Deps] update `eslint`, `browserify`, `iconv-lite`, `tape` +- [Tests] up to `node` `v8.1`, `v7.10`, `v6.11`; npm v4.6 breaks on node < v1; npm v5+ breaks on node < v4 +- [Tests] add `editorconfig-tools` + +## **6.4.0** +- [New] `qs.stringify`: add `encodeValuesOnly` option +- [Fix] follow `allowPrototypes` option during merge (#201, #201) +- [Fix] support keys starting with brackets (#202, #200) +- [Fix] chmod a-x +- [Dev Deps] update `eslint` +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds +- [eslint] reduce warnings + +## **6.3.2** +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Dev Deps] update `eslint` +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.3.1** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties (thanks, @snyk!) 
+- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `browserify`, `iconv-lite`, `qs-iconv`, `tape` +- [Tests] on all node minors; improve test matrix +- [Docs] document stringify option `allowDots` (#195) +- [Docs] add empty object and array values example (#195) +- [Docs] Fix minor inconsistency/typo (#192) +- [Docs] document stringify option `sort` (#191) +- [Refactor] `stringify`: throw faster with an invalid encoder +- [Refactor] remove unnecessary escapes (#184) +- Remove contributing.md, since `qs` is no longer part of `hapi` (#183) + +## **6.3.0** +- [New] Add support for RFC 1738 (#174, #173) +- [New] `stringify`: Add `serializeDate` option to customize Date serialization (#159) +- [Fix] ensure `utils.merge` handles merging two arrays +- [Refactor] only constructors should be capitalized +- [Refactor] capitalized var names are for constructors only +- [Refactor] avoid using a sparse array +- [Robustness] `formats`: cache `String#replace` +- [Dev Deps] update `browserify`, `eslint`, `@ljharb/eslint-config`; add `safe-publish-latest` +- [Tests] up to `node` `v6.8`, `v4.6`; improve test matrix +- [Tests] flesh out arrayLimit/arrayFormat tests (#107) +- [Tests] skip Object.create tests when null objects are not available +- [Tests] Turn on eslint for test files (#175) + +## **6.2.3** +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.2.2** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties + +## **6.2.1** +- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values +- [Refactor] Be explicit and use `Object.prototype.hasOwnProperty.call` +- [Tests] remove `parallelshell` since it does not reliably report failures +- [Tests] up to `node` `v6.3`, `v5.12` +- [Dev Deps] update `tape`, `eslint`, `@ljharb/eslint-config`, `qs-iconv` + +## [**6.2.0**](https://github.com/ljharb/qs/issues?milestone=36&state=closed) +- [New] pass Buffers to the encoder/decoder directly (#161) +- [New] add "encoder" and "decoder" options, for custom param encoding/decoding (#160) +- [Fix] fix compacting of nested sparse arrays (#150) + +## **6.1.2 +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.1.1** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties + +## [**6.1.0**](https://github.com/ljharb/qs/issues?milestone=35&state=closed) +- [New] allowDots option for `stringify` (#151) +- [Fix] "sort" option should work at a depth of 3 or more (#151) +- [Fix] Restore `dist` directory; will be removed in v7 (#148) + +## **6.0.4** +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.0.3** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties +- [Fix] Restore `dist` directory; will be removed in v7 (#148) + +## [**6.0.2**](https://github.com/ljharb/qs/issues?milestone=33&state=closed) +- Revert ES6 requirement and restore support for node down to v0.8. 
+ +## [**6.0.1**](https://github.com/ljharb/qs/issues?milestone=32&state=closed) +- [**#127**](https://github.com/ljharb/qs/pull/127) Fix engines definition in package.json + +## [**6.0.0**](https://github.com/ljharb/qs/issues?milestone=31&state=closed) +- [**#124**](https://github.com/ljharb/qs/issues/124) Use ES6 and drop support for node < v4 + +## **5.2.1** +- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values + +## [**5.2.0**](https://github.com/ljharb/qs/issues?milestone=30&state=closed) +- [**#64**](https://github.com/ljharb/qs/issues/64) Add option to sort object keys in the query string + +## [**5.1.0**](https://github.com/ljharb/qs/issues?milestone=29&state=closed) +- [**#117**](https://github.com/ljharb/qs/issues/117) make URI encoding stringified results optional +- [**#106**](https://github.com/ljharb/qs/issues/106) Add flag `skipNulls` to optionally skip null values in stringify + +## [**5.0.0**](https://github.com/ljharb/qs/issues?milestone=28&state=closed) +- [**#114**](https://github.com/ljharb/qs/issues/114) default allowDots to false +- [**#100**](https://github.com/ljharb/qs/issues/100) include dist to npm + +## [**4.0.0**](https://github.com/ljharb/qs/issues?milestone=26&state=closed) +- [**#98**](https://github.com/ljharb/qs/issues/98) make returning plain objects and allowing prototype overwriting properties optional + +## [**3.1.0**](https://github.com/ljharb/qs/issues?milestone=24&state=closed) +- [**#89**](https://github.com/ljharb/qs/issues/89) Add option to disable "Transform dot notation to bracket notation" + +## [**3.0.0**](https://github.com/ljharb/qs/issues?milestone=23&state=closed) +- [**#80**](https://github.com/ljharb/qs/issues/80) qs.parse silently drops properties +- [**#77**](https://github.com/ljharb/qs/issues/77) Perf boost +- [**#60**](https://github.com/ljharb/qs/issues/60) Add explicit option to disable array parsing +- [**#74**](https://github.com/ljharb/qs/issues/74) Bad parse when turning array into object +- [**#81**](https://github.com/ljharb/qs/issues/81) Add a `filter` option +- [**#68**](https://github.com/ljharb/qs/issues/68) Fixed issue with recursion and passing strings into objects. +- [**#66**](https://github.com/ljharb/qs/issues/66) Add mixed array and object dot notation support Closes: #47 +- [**#76**](https://github.com/ljharb/qs/issues/76) RFC 3986 +- [**#85**](https://github.com/ljharb/qs/issues/85) No equal sign +- [**#84**](https://github.com/ljharb/qs/issues/84) update license attribute + +## [**2.4.1**](https://github.com/ljharb/qs/issues?milestone=20&state=closed) +- [**#73**](https://github.com/ljharb/qs/issues/73) Property 'hasOwnProperty' of object # is not a function + +## [**2.4.0**](https://github.com/ljharb/qs/issues?milestone=19&state=closed) +- [**#70**](https://github.com/ljharb/qs/issues/70) Add arrayFormat option + +## [**2.3.3**](https://github.com/ljharb/qs/issues?milestone=18&state=closed) +- [**#59**](https://github.com/ljharb/qs/issues/59) make sure array indexes are >= 0, closes #57 +- [**#58**](https://github.com/ljharb/qs/issues/58) make qs usable for browser loader + +## [**2.3.2**](https://github.com/ljharb/qs/issues?milestone=17&state=closed) +- [**#55**](https://github.com/ljharb/qs/issues/55) allow merging a string into an object + +## [**2.3.1**](https://github.com/ljharb/qs/issues?milestone=16&state=closed) +- [**#52**](https://github.com/ljharb/qs/issues/52) Return "undefined" and "false" instead of throwing "TypeError". 
+ +## [**2.3.0**](https://github.com/ljharb/qs/issues?milestone=15&state=closed) +- [**#50**](https://github.com/ljharb/qs/issues/50) add option to omit array indices, closes #46 + +## [**2.2.5**](https://github.com/ljharb/qs/issues?milestone=14&state=closed) +- [**#39**](https://github.com/ljharb/qs/issues/39) Is there an alternative to Buffer.isBuffer? +- [**#49**](https://github.com/ljharb/qs/issues/49) refactor utils.merge, fixes #45 +- [**#41**](https://github.com/ljharb/qs/issues/41) avoid browserifying Buffer, for #39 + +## [**2.2.4**](https://github.com/ljharb/qs/issues?milestone=13&state=closed) +- [**#38**](https://github.com/ljharb/qs/issues/38) how to handle object keys beginning with a number + +## [**2.2.3**](https://github.com/ljharb/qs/issues?milestone=12&state=closed) +- [**#37**](https://github.com/ljharb/qs/issues/37) parser discards first empty value in array +- [**#36**](https://github.com/ljharb/qs/issues/36) Update to lab 4.x + +## [**2.2.2**](https://github.com/ljharb/qs/issues?milestone=11&state=closed) +- [**#33**](https://github.com/ljharb/qs/issues/33) Error when plain object in a value +- [**#34**](https://github.com/ljharb/qs/issues/34) use Object.prototype.hasOwnProperty.call instead of obj.hasOwnProperty +- [**#24**](https://github.com/ljharb/qs/issues/24) Changelog? Semver? + +## [**2.2.1**](https://github.com/ljharb/qs/issues?milestone=10&state=closed) +- [**#32**](https://github.com/ljharb/qs/issues/32) account for circular references properly, closes #31 +- [**#31**](https://github.com/ljharb/qs/issues/31) qs.parse stackoverflow on circular objects + +## [**2.2.0**](https://github.com/ljharb/qs/issues?milestone=9&state=closed) +- [**#26**](https://github.com/ljharb/qs/issues/26) Don't use Buffer global if it's not present +- [**#30**](https://github.com/ljharb/qs/issues/30) Bug when merging non-object values into arrays +- [**#29**](https://github.com/ljharb/qs/issues/29) Don't call Utils.clone at the top of Utils.merge +- [**#23**](https://github.com/ljharb/qs/issues/23) Ability to not limit parameters? + +## [**2.1.0**](https://github.com/ljharb/qs/issues?milestone=8&state=closed) +- [**#22**](https://github.com/ljharb/qs/issues/22) Enable using a RegExp as delimiter + +## [**2.0.0**](https://github.com/ljharb/qs/issues?milestone=7&state=closed) +- [**#18**](https://github.com/ljharb/qs/issues/18) Why is there arrayLimit? 
+- [**#20**](https://github.com/ljharb/qs/issues/20) Configurable parametersLimit +- [**#21**](https://github.com/ljharb/qs/issues/21) make all limits optional, for #18, for #20 + +## [**1.2.2**](https://github.com/ljharb/qs/issues?milestone=6&state=closed) +- [**#19**](https://github.com/ljharb/qs/issues/19) Don't overwrite null values + +## [**1.2.1**](https://github.com/ljharb/qs/issues?milestone=5&state=closed) +- [**#16**](https://github.com/ljharb/qs/issues/16) ignore non-string delimiters +- [**#15**](https://github.com/ljharb/qs/issues/15) Close code block + +## [**1.2.0**](https://github.com/ljharb/qs/issues?milestone=4&state=closed) +- [**#12**](https://github.com/ljharb/qs/issues/12) Add optional delim argument +- [**#13**](https://github.com/ljharb/qs/issues/13) fix #11: flattened keys in array are now correctly parsed + +## [**1.1.0**](https://github.com/ljharb/qs/issues?milestone=3&state=closed) +- [**#7**](https://github.com/ljharb/qs/issues/7) Empty values of a POST array disappear after being submitted +- [**#9**](https://github.com/ljharb/qs/issues/9) Should not omit equals signs (=) when value is null +- [**#6**](https://github.com/ljharb/qs/issues/6) Minor grammar fix in README + +## [**1.0.2**](https://github.com/ljharb/qs/issues?milestone=2&state=closed) +- [**#5**](https://github.com/ljharb/qs/issues/5) array holes incorrectly copied into object on large index diff --git a/node_modules/qs/LICENSE b/node_modules/qs/LICENSE new file mode 100644 index 00000000..d4569487 --- /dev/null +++ b/node_modules/qs/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2014 Nathan LaFreniere and other contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * The names of any contributors may not be used to endorse or promote + products derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + * * * + +The complete list of contributors can be found at: https://github.com/hapijs/qs/graphs/contributors diff --git a/node_modules/qs/README.md b/node_modules/qs/README.md new file mode 100644 index 00000000..8590cfd3 --- /dev/null +++ b/node_modules/qs/README.md @@ -0,0 +1,570 @@ +# qs [![Version Badge][2]][1] + +[![Build Status][3]][4] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +A querystring parsing and stringifying library with some added security. + +Lead Maintainer: [Jordan Harband](https://github.com/ljharb) + +The **qs** module was originally created and maintained by [TJ Holowaychuk](https://github.com/visionmedia/node-querystring). + +## Usage + +```javascript +var qs = require('qs'); +var assert = require('assert'); + +var obj = qs.parse('a=c'); +assert.deepEqual(obj, { a: 'c' }); + +var str = qs.stringify(obj); +assert.equal(str, 'a=c'); +``` + +### Parsing Objects + +[](#preventEval) +```javascript +qs.parse(string, [options]); +``` + +**qs** allows you to create nested objects within your query strings, by surrounding the name of sub-keys with square brackets `[]`. +For example, the string `'foo[bar]=baz'` converts to: + +```javascript +assert.deepEqual(qs.parse('foo[bar]=baz'), { + foo: { + bar: 'baz' + } +}); +``` + +When using the `plainObjects` option the parsed value is returned as a null object, created via `Object.create(null)` and as such you should be aware that prototype methods will not exist on it and a user may set those names to whatever value they like: + +```javascript +var nullObject = qs.parse('a[hasOwnProperty]=b', { plainObjects: true }); +assert.deepEqual(nullObject, { a: { hasOwnProperty: 'b' } }); +``` + +By default parameters that would overwrite properties on the object prototype are ignored, if you wish to keep the data from those fields either use `plainObjects` as mentioned above, or set `allowPrototypes` to `true` which will allow user input to overwrite those properties. *WARNING* It is generally a bad idea to enable this option as it can cause problems when attempting to use the properties that have been overwritten. Always be careful with this option. + +```javascript +var protoObject = qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }); +assert.deepEqual(protoObject, { a: { hasOwnProperty: 'b' } }); +``` + +URI encoded strings work too: + +```javascript +assert.deepEqual(qs.parse('a%5Bb%5D=c'), { + a: { b: 'c' } +}); +``` + +You can also nest your objects, like `'foo[bar][baz]=foobarbaz'`: + +```javascript +assert.deepEqual(qs.parse('foo[bar][baz]=foobarbaz'), { + foo: { + bar: { + baz: 'foobarbaz' + } + } +}); +``` + +By default, when nesting objects **qs** will only parse up to 5 children deep. 
This means if you attempt to parse a string like +`'a[b][c][d][e][f][g][h][i]=j'` your resulting object will be: + +```javascript +var expected = { + a: { + b: { + c: { + d: { + e: { + f: { + '[g][h][i]': 'j' + } + } + } + } + } + } +}; +var string = 'a[b][c][d][e][f][g][h][i]=j'; +assert.deepEqual(qs.parse(string), expected); +``` + +This depth can be overridden by passing a `depth` option to `qs.parse(string, [options])`: + +```javascript +var deep = qs.parse('a[b][c][d][e][f][g][h][i]=j', { depth: 1 }); +assert.deepEqual(deep, { a: { b: { '[c][d][e][f][g][h][i]': 'j' } } }); +``` + +The depth limit helps mitigate abuse when **qs** is used to parse user input, and it is recommended to keep it a reasonably small number. + +For similar reasons, by default **qs** will only parse up to 1000 parameters. This can be overridden by passing a `parameterLimit` option: + +```javascript +var limited = qs.parse('a=b&c=d', { parameterLimit: 1 }); +assert.deepEqual(limited, { a: 'b' }); +``` + +To bypass the leading question mark, use `ignoreQueryPrefix`: + +```javascript +var prefixed = qs.parse('?a=b&c=d', { ignoreQueryPrefix: true }); +assert.deepEqual(prefixed, { a: 'b', c: 'd' }); +``` + +An optional delimiter can also be passed: + +```javascript +var delimited = qs.parse('a=b;c=d', { delimiter: ';' }); +assert.deepEqual(delimited, { a: 'b', c: 'd' }); +``` + +Delimiters can be a regular expression too: + +```javascript +var regexed = qs.parse('a=b;c=d,e=f', { delimiter: /[;,]/ }); +assert.deepEqual(regexed, { a: 'b', c: 'd', e: 'f' }); +``` + +Option `allowDots` can be used to enable dot notation: + +```javascript +var withDots = qs.parse('a.b=c', { allowDots: true }); +assert.deepEqual(withDots, { a: { b: 'c' } }); +``` + +If you have to deal with legacy browsers or services, there's +also support for decoding percent-encoded octets as iso-8859-1: + +```javascript +var oldCharset = qs.parse('a=%A7', { charset: 'iso-8859-1' }); +assert.deepEqual(oldCharset, { a: '§' }); +``` + +Some services add an initial `utf8=✓` value to forms so that old +Internet Explorer versions are more likely to submit the form as +utf-8. Additionally, the server can check the value against wrong +encodings of the checkmark character and detect that a query string +or `application/x-www-form-urlencoded` body was *not* sent as +utf-8, eg. if the form had an `accept-charset` parameter or the +containing page had a different character set. + +**qs** supports this mechanism via the `charsetSentinel` option. +If specified, the `utf8` parameter will be omitted from the +returned object. It will be used to switch to `iso-8859-1`/`utf-8` +mode depending on how the checkmark is encoded. + +**Important**: When you specify both the `charset` option and the +`charsetSentinel` option, the `charset` will be overridden when +the request contains a `utf8` parameter from which the actual +charset can be deduced. In that sense the `charset` will behave +as the default charset rather than the authoritative charset. 
+ +```javascript +var detectedAsUtf8 = qs.parse('utf8=%E2%9C%93&a=%C3%B8', { + charset: 'iso-8859-1', + charsetSentinel: true +}); +assert.deepEqual(detectedAsUtf8, { a: 'ø' }); + +// Browsers encode the checkmark as ✓ when submitting as iso-8859-1: +var detectedAsIso8859_1 = qs.parse('utf8=%26%2310003%3B&a=%F8', { + charset: 'utf-8', + charsetSentinel: true +}); +assert.deepEqual(detectedAsIso8859_1, { a: 'ø' }); +``` + +If you want to decode the `&#...;` syntax to the actual character, +you can specify the `interpretNumericEntities` option as well: + +```javascript +var detectedAsIso8859_1 = qs.parse('a=%26%239786%3B', { + charset: 'iso-8859-1', + interpretNumericEntities: true +}); +assert.deepEqual(detectedAsIso8859_1, { a: '☺' }); +``` + +It also works when the charset has been detected in `charsetSentinel` +mode. + +### Parsing Arrays + +**qs** can also parse arrays using a similar `[]` notation: + +```javascript +var withArray = qs.parse('a[]=b&a[]=c'); +assert.deepEqual(withArray, { a: ['b', 'c'] }); +``` + +You may specify an index as well: + +```javascript +var withIndexes = qs.parse('a[1]=c&a[0]=b'); +assert.deepEqual(withIndexes, { a: ['b', 'c'] }); +``` + +Note that the only difference between an index in an array and a key in an object is that the value between the brackets must be a number +to create an array. When creating arrays with specific indices, **qs** will compact a sparse array to only the existing values preserving +their order: + +```javascript +var noSparse = qs.parse('a[1]=b&a[15]=c'); +assert.deepEqual(noSparse, { a: ['b', 'c'] }); +``` + +Note that an empty string is also a value, and will be preserved: + +```javascript +var withEmptyString = qs.parse('a[]=&a[]=b'); +assert.deepEqual(withEmptyString, { a: ['', 'b'] }); + +var withIndexedEmptyString = qs.parse('a[0]=b&a[1]=&a[2]=c'); +assert.deepEqual(withIndexedEmptyString, { a: ['b', '', 'c'] }); +``` + +**qs** will also limit specifying indices in an array to a maximum index of `20`. Any array members with an index of greater than `20` will +instead be converted to an object with the index as the key. This is needed to handle cases when someone sent, for example, `a[999999999]` and it will take significant time to iterate over this huge array. + +```javascript +var withMaxIndex = qs.parse('a[100]=b'); +assert.deepEqual(withMaxIndex, { a: { '100': 'b' } }); +``` + +This limit can be overridden by passing an `arrayLimit` option: + +```javascript +var withArrayLimit = qs.parse('a[1]=b', { arrayLimit: 0 }); +assert.deepEqual(withArrayLimit, { a: { '1': 'b' } }); +``` + +To disable array parsing entirely, set `parseArrays` to `false`. 
+ +```javascript +var noParsingArrays = qs.parse('a[]=b', { parseArrays: false }); +assert.deepEqual(noParsingArrays, { a: { '0': 'b' } }); +``` + +If you mix notations, **qs** will merge the two items into an object: + +```javascript +var mixedNotation = qs.parse('a[0]=b&a[b]=c'); +assert.deepEqual(mixedNotation, { a: { '0': 'b', b: 'c' } }); +``` + +You can also create arrays of objects: + +```javascript +var arraysOfObjects = qs.parse('a[][b]=c'); +assert.deepEqual(arraysOfObjects, { a: [{ b: 'c' }] }); +``` + +Some people use comma to join array, **qs** can parse it: +```javascript +var arraysOfObjects = qs.parse('a=b,c', { comma: true }) +assert.deepEqual(arraysOfObjects, { a: ['b', 'c'] }) +``` +(_this cannot convert nested objects, such as `a={b:1},{c:d}`_) + +### Stringifying + +[](#preventEval) +```javascript +qs.stringify(object, [options]); +``` + +When stringifying, **qs** by default URI encodes output. Objects are stringified as you would expect: + +```javascript +assert.equal(qs.stringify({ a: 'b' }), 'a=b'); +assert.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); +``` + +This encoding can be disabled by setting the `encode` option to `false`: + +```javascript +var unencoded = qs.stringify({ a: { b: 'c' } }, { encode: false }); +assert.equal(unencoded, 'a[b]=c'); +``` + +Encoding can be disabled for keys by setting the `encodeValuesOnly` option to `true`: +```javascript +var encodedValues = qs.stringify( + { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, + { encodeValuesOnly: true } +); +assert.equal(encodedValues,'a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h'); +``` + +This encoding can also be replaced by a custom encoding method set as `encoder` option: + +```javascript +var encoded = qs.stringify({ a: { b: 'c' } }, { encoder: function (str) { + // Passed in values `a`, `b`, `c` + return // Return encoded string +}}) +``` + +_(Note: the `encoder` option does not apply if `encode` is `false`)_ + +Analogue to the `encoder` there is a `decoder` option for `parse` to override decoding of properties and values: + +```javascript +var decoded = qs.parse('x=z', { decoder: function (str) { + // Passed in values `x`, `z` + return // Return decoded string +}}) +``` + +Examples beyond this point will be shown as though the output is not URI encoded for clarity. Please note that the return values in these cases *will* be URI encoded during real usage. 
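[Editor's note — not part of the upstream qs README in this diff.] The `encoder`/`decoder` snippets above are deliberately skeletal (`return // Return encoded string`). As a minimal runnable sketch of the same hook, a custom `decoder` can delegate to the default decoder it receives and then post-process values, for example turning digit-only strings into numbers; the option names and the `(str, defaultDecoder, charset)` call shape follow the bundled `lib/parse.js` shown later in this diff.

```javascript
var qs = require('qs');
var assert = require('assert');

// The decoder is invoked for every key and every value;
// `defaultDecoder` is qs's own utils.decode, so we delegate to it first.
var parsed = qs.parse('a=1&b=x', {
  decoder: function (str, defaultDecoder, charset) {
    var value = defaultDecoder(str, defaultDecoder, charset);
    // Keys ('a', 'b') fall through unchanged; digit-only values become numbers.
    return /^\d+$/.test(value) ? parseInt(value, 10) : value;
  }
});

assert.deepEqual(parsed, { a: 1, b: 'x' });
```

The same delegation pattern applies on the stringify side: a custom `encoder` is called as `encoder(str, defaultEncoder, charset)` (per the bundled `lib/stringify.js` below), so it can likewise wrap the default encoder rather than re-implement percent-encoding.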
+ +When arrays are stringified, by default they are given explicit indices: + +```javascript +qs.stringify({ a: ['b', 'c', 'd'] }); +// 'a[0]=b&a[1]=c&a[2]=d' +``` + +You may override this by setting the `indices` option to `false`: + +```javascript +qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }); +// 'a=b&a=c&a=d' +``` + +You may use the `arrayFormat` option to specify the format of the output array: + +```javascript +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }) +// 'a[0]=b&a[1]=c' +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }) +// 'a[]=b&a[]=c' +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }) +// 'a=b&a=c' +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'comma' }) +// 'a=b,c' +``` + +When objects are stringified, by default they use bracket notation: + +```javascript +qs.stringify({ a: { b: { c: 'd', e: 'f' } } }); +// 'a[b][c]=d&a[b][e]=f' +``` + +You may override this to use dot notation by setting the `allowDots` option to `true`: + +```javascript +qs.stringify({ a: { b: { c: 'd', e: 'f' } } }, { allowDots: true }); +// 'a.b.c=d&a.b.e=f' +``` + +Empty strings and null values will omit the value, but the equals sign (=) remains in place: + +```javascript +assert.equal(qs.stringify({ a: '' }), 'a='); +``` + +Key with no values (such as an empty object or array) will return nothing: + +```javascript +assert.equal(qs.stringify({ a: [] }), ''); +assert.equal(qs.stringify({ a: {} }), ''); +assert.equal(qs.stringify({ a: [{}] }), ''); +assert.equal(qs.stringify({ a: { b: []} }), ''); +assert.equal(qs.stringify({ a: { b: {}} }), ''); +``` + +Properties that are set to `undefined` will be omitted entirely: + +```javascript +assert.equal(qs.stringify({ a: null, b: undefined }), 'a='); +``` + +The query string may optionally be prepended with a question mark: + +```javascript +assert.equal(qs.stringify({ a: 'b', c: 'd' }, { addQueryPrefix: true }), '?a=b&c=d'); +``` + +The delimiter may be overridden with stringify as well: + +```javascript +assert.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); +``` + +If you only want to override the serialization of `Date` objects, you can provide a `serializeDate` option: + +```javascript +var date = new Date(7); +assert.equal(qs.stringify({ a: date }), 'a=1970-01-01T00:00:00.007Z'.replace(/:/g, '%3A')); +assert.equal( + qs.stringify({ a: date }, { serializeDate: function (d) { return d.getTime(); } }), + 'a=7' +); +``` + +You may use the `sort` option to affect the order of parameter keys: + +```javascript +function alphabeticalSort(a, b) { + return a.localeCompare(b); +} +assert.equal(qs.stringify({ a: 'c', z: 'y', b : 'f' }, { sort: alphabeticalSort }), 'a=c&b=f&z=y'); +``` + +Finally, you can use the `filter` option to restrict which keys will be included in the stringified output. +If you pass a function, it will be called for each key to obtain the replacement value. Otherwise, if you +pass an array, it will be used to select properties and array indices for stringification: + +```javascript +function filterFunc(prefix, value) { + if (prefix == 'b') { + // Return an `undefined` value to omit a property. 
+ return; + } + if (prefix == 'e[f]') { + return value.getTime(); + } + if (prefix == 'e[g][0]') { + return value * 2; + } + return value; +} +qs.stringify({ a: 'b', c: 'd', e: { f: new Date(123), g: [2] } }, { filter: filterFunc }); +// 'a=b&c=d&e[f]=123&e[g][0]=4' +qs.stringify({ a: 'b', c: 'd', e: 'f' }, { filter: ['a', 'e'] }); +// 'a=b&e=f' +qs.stringify({ a: ['b', 'c', 'd'], e: 'f' }, { filter: ['a', 0, 2] }); +// 'a[0]=b&a[2]=d' +``` + +### Handling of `null` values + +By default, `null` values are treated like empty strings: + +```javascript +var withNull = qs.stringify({ a: null, b: '' }); +assert.equal(withNull, 'a=&b='); +``` + +Parsing does not distinguish between parameters with and without equal signs. Both are converted to empty strings. + +```javascript +var equalsInsensitive = qs.parse('a&b='); +assert.deepEqual(equalsInsensitive, { a: '', b: '' }); +``` + +To distinguish between `null` values and empty strings use the `strictNullHandling` flag. In the result string the `null` +values have no `=` sign: + +```javascript +var strictNull = qs.stringify({ a: null, b: '' }, { strictNullHandling: true }); +assert.equal(strictNull, 'a&b='); +``` + +To parse values without `=` back to `null` use the `strictNullHandling` flag: + +```javascript +var parsedStrictNull = qs.parse('a&b=', { strictNullHandling: true }); +assert.deepEqual(parsedStrictNull, { a: null, b: '' }); +``` + +To completely skip rendering keys with `null` values, use the `skipNulls` flag: + +```javascript +var nullsSkipped = qs.stringify({ a: 'b', c: null}, { skipNulls: true }); +assert.equal(nullsSkipped, 'a=b'); +``` + +If you're communicating with legacy systems, you can switch to `iso-8859-1` +using the `charset` option: + +```javascript +var iso = qs.stringify({ æ: 'æ' }, { charset: 'iso-8859-1' }); +assert.equal(iso, '%E6=%E6'); +``` + +Characters that don't exist in `iso-8859-1` will be converted to numeric +entities, similar to what browsers do: + +```javascript +var numeric = qs.stringify({ a: '☺' }, { charset: 'iso-8859-1' }); +assert.equal(numeric, 'a=%26%239786%3B'); +``` + +You can use the `charsetSentinel` option to announce the character by +including an `utf8=✓` parameter with the proper encoding if the checkmark, +similar to what Ruby on Rails and others do when submitting forms. + +```javascript +var sentinel = qs.stringify({ a: '☺' }, { charsetSentinel: true }); +assert.equal(sentinel, 'utf8=%E2%9C%93&a=%E2%98%BA'); + +var isoSentinel = qs.stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'iso-8859-1' }); +assert.equal(isoSentinel, 'utf8=%26%2310003%3B&a=%E6'); +``` + +### Dealing with special character sets + +By default the encoding and decoding of characters is done in `utf-8`, +and `iso-8859-1` support is also built in via the `charset` parameter. + +If you wish to encode querystrings to a different character set (i.e. +[Shift JIS](https://en.wikipedia.org/wiki/Shift_JIS)) you can use the +[`qs-iconv`](https://github.com/martinheidegger/qs-iconv) library: + +```javascript +var encoder = require('qs-iconv/encoder')('shift_jis'); +var shiftJISEncoded = qs.stringify({ a: 'こんにちは!' }, { encoder: encoder }); +assert.equal(shiftJISEncoded, 'a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I'); +``` + +This also works for decoding of query strings: + +```javascript +var decoder = require('qs-iconv/decoder')('shift_jis'); +var obj = qs.parse('a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I', { decoder: decoder }); +assert.deepEqual(obj, { a: 'こんにちは!' 
}); +``` + +### RFC 3986 and RFC 1738 space encoding + +RFC3986 used as default option and encodes ' ' to *%20* which is backward compatible. +In the same time, output can be stringified as per RFC1738 with ' ' equal to '+'. + +``` +assert.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); +assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC3986' }), 'a=b%20c'); +assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC1738' }), 'a=b+c'); +``` + +[1]: https://npmjs.org/package/qs +[2]: http://versionbadg.es/ljharb/qs.svg +[3]: https://api.travis-ci.org/ljharb/qs.svg +[4]: https://travis-ci.org/ljharb/qs +[5]: https://david-dm.org/ljharb/qs.svg +[6]: https://david-dm.org/ljharb/qs +[7]: https://david-dm.org/ljharb/qs/dev-status.svg +[8]: https://david-dm.org/ljharb/qs?type=dev +[9]: https://ci.testling.com/ljharb/qs.png +[10]: https://ci.testling.com/ljharb/qs +[11]: https://nodei.co/npm/qs.png?downloads=true&stars=true +[license-image]: http://img.shields.io/npm/l/qs.svg +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/qs.svg +[downloads-url]: http://npm-stat.com/charts.html?package=qs diff --git a/node_modules/qs/dist/qs.js b/node_modules/qs/dist/qs.js new file mode 100644 index 00000000..17f4e600 --- /dev/null +++ b/node_modules/qs/dist/qs.js @@ -0,0 +1,782 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Qs = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i -1) { + val = val.split(','); + } + + if (has.call(obj, key)) { + obj[key] = utils.combine(obj[key], val); + } else { + obj[key] = val; + } + } + + return obj; +}; + +var parseObject = function (chain, val, options) { + var leaf = val; + + for (var i = chain.length - 1; i >= 0; --i) { + var obj; + var root = chain[i]; + + if (root === '[]' && options.parseArrays) { + obj = [].concat(leaf); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; + var index = parseInt(cleanRoot, 10); + if (!options.parseArrays && cleanRoot === '') { + obj = { 0: leaf }; + } else if ( + !isNaN(index) + && root !== cleanRoot + && String(index) === cleanRoot + && index >= 0 + && (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = leaf; + } else { + obj[cleanRoot] = leaf; + } + } + + leaf = obj; + } + + return leaf; +}; + +var parseKeys = function parseQueryStringKeys(givenKey, val, options) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var brackets = /(\[[^[\]]*])/; + var child = /(\[[^[\]]*])/g; + + // Get the parent + + var segment = brackets.exec(key); + var parent = segment ? 
key.slice(0, segment.index) : key; + + // Stash the parent if it exists + + var keys = []; + if (parent) { + // If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties + if (!options.plainObjects && has.call(Object.prototype, parent)) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(parent); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while ((segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { + if (!options.allowPrototypes) { + return; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options); +}; + +var normalizeParseOptions = function normalizeParseOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new Error('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset; + + return { + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes, + arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma, + decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder, + delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter, + depth: typeof opts.depth === 'number' ? opts.depth : defaults.depth, + ignoreQueryPrefix: opts.ignoreQueryPrefix === true, + interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities, + parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit, + parseArrays: opts.parseArrays !== false, + plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (str, opts) { + var options = normalizeParseOptions(opts); + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? 
Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options); + obj = utils.merge(obj, newObj, options); + } + + return utils.compact(obj); +}; + +},{"./utils":5}],4:[function(require,module,exports){ +'use strict'; + +var utils = require('./utils'); +var formats = require('./formats'); +var has = Object.prototype.hasOwnProperty; + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { // eslint-disable-line func-name-matching + return prefix + '[]'; + }, + comma: 'comma', + indices: function indices(prefix, key) { // eslint-disable-line func-name-matching + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { // eslint-disable-line func-name-matching + return prefix; + } +}; + +var isArray = Array.isArray; +var push = Array.prototype.push; +var pushToArray = function (arr, valueOrArray) { + push.apply(arr, isArray(valueOrArray) ? valueOrArray : [valueOrArray]); +}; + +var toISO = Date.prototype.toISOString; + +var defaults = { + addQueryPrefix: false, + allowDots: false, + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encoder: utils.encode, + encodeValuesOnly: false, + formatter: formats.formatters[formats['default']], + // deprecated + indices: false, + serializeDate: function serializeDate(date) { // eslint-disable-line func-name-matching + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; + +var stringify = function stringify( // eslint-disable-line func-name-matching + object, + prefix, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset +) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (generateArrayPrefix === 'comma' && isArray(obj)) { + obj = obj.join(','); + } + + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset) : prefix; + } + + obj = ''; + } + + if (typeof obj === 'string' || typeof obj === 'number' || typeof obj === 'boolean' || utils.isBuffer(obj)) { + if (encoder) { + var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset); + return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset))]; + } + return [formatter(prefix) + '=' + formatter(String(obj))]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + if (isArray(obj)) { + pushToArray(values, stringify( + obj[key], + typeof generateArrayPrefix === 'function' ? generateArrayPrefix(prefix, key) : prefix, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset + )); + } else { + pushToArray(values, stringify( + obj[key], + prefix + (allowDots ? '.' 
+ key : '[' + key + ']'), + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset + )); + } + } + + return values; +}; + +var normalizeStringifyOptions = function normalizeStringifyOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.encoder !== null && opts.encoder !== undefined && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + var charset = opts.charset || defaults.charset; + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + + var format = formats['default']; + if (typeof opts.format !== 'undefined') { + if (!has.call(formats.formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + format = opts.format; + } + var formatter = formats.formatters[format]; + + var filter = defaults.filter; + if (typeof opts.filter === 'function' || isArray(opts.filter)) { + filter = opts.filter; + } + + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = normalizeStringifyOptions(opts); + + var objKeys; + var filter; + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (opts && opts.arrayFormat in arrayPrefixGenerators) { + arrayFormat = opts.arrayFormat; + } else if (opts && 'indices' in opts) { + arrayFormat = opts.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (options.sort) { + objKeys.sort(options.sort); + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (options.skipNulls && obj[key] === null) { + continue; + } + pushToArray(keys, stringify( + obj[key], + key, + generateArrayPrefix, + options.strictNullHandling, + options.skipNulls, + options.encode ? 
options.encoder : null, + options.filter, + options.sort, + options.allowDots, + options.serializeDate, + options.formatter, + options.encodeValuesOnly, + options.charset + )); + } + + var joined = keys.join(options.delimiter); + var prefix = options.addQueryPrefix === true ? '?' : ''; + + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark + prefix += 'utf8=%26%2310003%3B&'; + } else { + // encodeURIComponent('✓') + prefix += 'utf8=%E2%9C%93&'; + } + } + + return joined.length > 0 ? prefix + joined : ''; +}; + +},{"./formats":1,"./utils":5}],5:[function(require,module,exports){ +'use strict'; + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; + +var hexTable = (function () { + var array = []; + for (var i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } + + return array; +}()); + +var compactQueue = function compactQueue(queue) { + while (queue.length > 1) { + var item = queue.pop(); + var obj = item.obj[item.prop]; + + if (isArray(obj)) { + var compacted = []; + + for (var j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } + + item.obj[item.prop] = compacted; + } + } +}; + +var arrayToObject = function arrayToObject(source, options) { + var obj = options && options.plainObjects ? Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +var merge = function merge(target, source, options) { + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (isArray(target)) { + target.push(source); + } else if (target && typeof target === 'object') { + if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) { + target[source] = true; + } + } else { + return [target, source]; + } + + return target; + } + + if (!target || typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (isArray(target) && !isArray(source)) { + mergeTarget = arrayToObject(target, options); + } + + if (isArray(target) && isArray(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + var targetItem = target[i]; + if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { + target[i] = merge(targetItem, item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +var assign = function assignSingleSource(target, source) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +}; + +var decode = function (str, decoder, charset) { + var strWithoutPlus = str.replace(/\+/g, ' '); + if (charset === 'iso-8859-1') { + // unescape never throws, no try...catch needed: + return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); + } + // utf-8 + try { + return decodeURIComponent(strWithoutPlus); + } catch (e) { + return strWithoutPlus; + } +}; + +var encode = function encode(str, defaultEncoder, charset) { + // This code was originally written by Brian White (mscdex) for the io.js core 
querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = typeof str === 'string' ? str : String(str); + + if (charset === 'iso-8859-1') { + return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { + return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; + }); + } + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D // - + || c === 0x2E // . + || c === 0x5F // _ + || c === 0x7E // ~ + || (c >= 0x30 && c <= 0x39) // 0-9 + || (c >= 0x41 && c <= 0x5A) // a-z + || (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + + hexTable[0x80 | ((c >> 12) & 0x3F)] + + hexTable[0x80 | ((c >> 6) & 0x3F)] + + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +var compact = function compact(value) { + var queue = [{ obj: { o: value }, prop: 'o' }]; + var refs = []; + + for (var i = 0; i < queue.length; ++i) { + var item = queue[i]; + var obj = item.obj[item.prop]; + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + var val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } + + compactQueue(queue); + + return value; +}; + +var isRegExp = function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +var isBuffer = function isBuffer(obj) { + if (!obj || typeof obj !== 'object') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +var combine = function combine(a, b) { + return [].concat(a, b); +}; + +module.exports = { + arrayToObject: arrayToObject, + assign: assign, + combine: combine, + compact: compact, + decode: decode, + encode: encode, + isBuffer: isBuffer, + isRegExp: isRegExp, + merge: merge +}; + +},{}]},{},[2])(2) +}); diff --git a/node_modules/qs/lib/formats.js b/node_modules/qs/lib/formats.js new file mode 100644 index 00000000..df459975 --- /dev/null +++ b/node_modules/qs/lib/formats.js @@ -0,0 +1,18 @@ +'use strict'; + +var replace = String.prototype.replace; +var percentTwenties = /%20/g; + +module.exports = { + 'default': 'RFC3986', + formatters: { + RFC1738: function (value) { + return replace.call(value, percentTwenties, '+'); + }, + RFC3986: function (value) { + return value; + } + }, + RFC1738: 'RFC1738', + RFC3986: 'RFC3986' +}; diff --git a/node_modules/qs/lib/index.js b/node_modules/qs/lib/index.js new file mode 100644 index 00000000..0d6a97dc --- /dev/null +++ b/node_modules/qs/lib/index.js @@ -0,0 +1,11 @@ +'use strict'; + +var stringify = require('./stringify'); +var parse = require('./parse'); +var formats = require('./formats'); + +module.exports = { + formats: formats, + parse: parse, + stringify: stringify +}; diff --git a/node_modules/qs/lib/parse.js b/node_modules/qs/lib/parse.js new file mode 100644 index 00000000..d81628b5 --- /dev/null +++ b/node_modules/qs/lib/parse.js @@ -0,0 
+1,242 @@ +'use strict'; + +var utils = require('./utils'); + +var has = Object.prototype.hasOwnProperty; + +var defaults = { + allowDots: false, + allowPrototypes: false, + arrayLimit: 20, + charset: 'utf-8', + charsetSentinel: false, + comma: false, + decoder: utils.decode, + delimiter: '&', + depth: 5, + ignoreQueryPrefix: false, + interpretNumericEntities: false, + parameterLimit: 1000, + parseArrays: true, + plainObjects: false, + strictNullHandling: false +}; + +var interpretNumericEntities = function (str) { + return str.replace(/&#(\d+);/g, function ($0, numberStr) { + return String.fromCharCode(parseInt(numberStr, 10)); + }); +}; + +// This is what browsers will submit when the ✓ character occurs in an +// application/x-www-form-urlencoded body and the encoding of the page containing +// the form is iso-8859-1, or when the submitted form has an accept-charset +// attribute of iso-8859-1. Presumably also with other charsets that do not contain +// the ✓ character, such as us-ascii. +var isoSentinel = 'utf8=%26%2310003%3B'; // encodeURIComponent('✓') + +// These are the percent-encoded utf-8 octets representing a checkmark, indicating that the request actually is utf-8 encoded. +var charsetSentinel = 'utf8=%E2%9C%93'; // encodeURIComponent('✓') + +var parseValues = function parseQueryStringValues(str, options) { + var obj = {}; + var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str; + var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit; + var parts = cleanStr.split(options.delimiter, limit); + var skipIndex = -1; // Keep track of where the utf8 sentinel was found + var i; + + var charset = options.charset; + if (options.charsetSentinel) { + for (i = 0; i < parts.length; ++i) { + if (parts[i].indexOf('utf8=') === 0) { + if (parts[i] === charsetSentinel) { + charset = 'utf-8'; + } else if (parts[i] === isoSentinel) { + charset = 'iso-8859-1'; + } + skipIndex = i; + i = parts.length; // The eslint settings do not allow break; + } + } + } + + for (i = 0; i < parts.length; ++i) { + if (i === skipIndex) { + continue; + } + var part = parts[i]; + + var bracketEqualsPos = part.indexOf(']='); + var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1; + + var key, val; + if (pos === -1) { + key = options.decoder(part, defaults.decoder, charset); + val = options.strictNullHandling ? null : ''; + } else { + key = options.decoder(part.slice(0, pos), defaults.decoder, charset); + val = options.decoder(part.slice(pos + 1), defaults.decoder, charset); + } + + if (val && options.interpretNumericEntities && charset === 'iso-8859-1') { + val = interpretNumericEntities(val); + } + + if (val && options.comma && val.indexOf(',') > -1) { + val = val.split(','); + } + + if (has.call(obj, key)) { + obj[key] = utils.combine(obj[key], val); + } else { + obj[key] = val; + } + } + + return obj; +}; + +var parseObject = function (chain, val, options) { + var leaf = val; + + for (var i = chain.length - 1; i >= 0; --i) { + var obj; + var root = chain[i]; + + if (root === '[]' && options.parseArrays) { + obj = [].concat(leaf); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? 
root.slice(1, -1) : root; + var index = parseInt(cleanRoot, 10); + if (!options.parseArrays && cleanRoot === '') { + obj = { 0: leaf }; + } else if ( + !isNaN(index) + && root !== cleanRoot + && String(index) === cleanRoot + && index >= 0 + && (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = leaf; + } else { + obj[cleanRoot] = leaf; + } + } + + leaf = obj; + } + + return leaf; +}; + +var parseKeys = function parseQueryStringKeys(givenKey, val, options) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var brackets = /(\[[^[\]]*])/; + var child = /(\[[^[\]]*])/g; + + // Get the parent + + var segment = brackets.exec(key); + var parent = segment ? key.slice(0, segment.index) : key; + + // Stash the parent if it exists + + var keys = []; + if (parent) { + // If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties + if (!options.plainObjects && has.call(Object.prototype, parent)) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(parent); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while ((segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { + if (!options.allowPrototypes) { + return; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options); +}; + +var normalizeParseOptions = function normalizeParseOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new Error('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset; + + return { + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes, + arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma, + decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder, + delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter, + depth: typeof opts.depth === 'number' ? opts.depth : defaults.depth, + ignoreQueryPrefix: opts.ignoreQueryPrefix === true, + interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities, + parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit, + parseArrays: opts.parseArrays !== false, + plainObjects: typeof opts.plainObjects === 'boolean' ? 
opts.plainObjects : defaults.plainObjects, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (str, opts) { + var options = normalizeParseOptions(opts); + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options); + obj = utils.merge(obj, newObj, options); + } + + return utils.compact(obj); +}; diff --git a/node_modules/qs/lib/stringify.js b/node_modules/qs/lib/stringify.js new file mode 100644 index 00000000..7455049c --- /dev/null +++ b/node_modules/qs/lib/stringify.js @@ -0,0 +1,269 @@ +'use strict'; + +var utils = require('./utils'); +var formats = require('./formats'); +var has = Object.prototype.hasOwnProperty; + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { // eslint-disable-line func-name-matching + return prefix + '[]'; + }, + comma: 'comma', + indices: function indices(prefix, key) { // eslint-disable-line func-name-matching + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { // eslint-disable-line func-name-matching + return prefix; + } +}; + +var isArray = Array.isArray; +var push = Array.prototype.push; +var pushToArray = function (arr, valueOrArray) { + push.apply(arr, isArray(valueOrArray) ? valueOrArray : [valueOrArray]); +}; + +var toISO = Date.prototype.toISOString; + +var defaults = { + addQueryPrefix: false, + allowDots: false, + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encoder: utils.encode, + encodeValuesOnly: false, + formatter: formats.formatters[formats['default']], + // deprecated + indices: false, + serializeDate: function serializeDate(date) { // eslint-disable-line func-name-matching + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; + +var stringify = function stringify( // eslint-disable-line func-name-matching + object, + prefix, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset +) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (generateArrayPrefix === 'comma' && isArray(obj)) { + obj = obj.join(','); + } + + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset) : prefix; + } + + obj = ''; + } + + if (typeof obj === 'string' || typeof obj === 'number' || typeof obj === 'boolean' || utils.isBuffer(obj)) { + if (encoder) { + var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset); + return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset))]; + } + return [formatter(prefix) + '=' + formatter(String(obj))]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? 
keys.sort(sort) : keys; + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + if (isArray(obj)) { + pushToArray(values, stringify( + obj[key], + typeof generateArrayPrefix === 'function' ? generateArrayPrefix(prefix, key) : prefix, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset + )); + } else { + pushToArray(values, stringify( + obj[key], + prefix + (allowDots ? '.' + key : '[' + key + ']'), + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset + )); + } + } + + return values; +}; + +var normalizeStringifyOptions = function normalizeStringifyOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.encoder !== null && opts.encoder !== undefined && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + var charset = opts.charset || defaults.charset; + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + + var format = formats['default']; + if (typeof opts.format !== 'undefined') { + if (!has.call(formats.formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + format = opts.format; + } + var formatter = formats.formatters[format]; + + var filter = defaults.filter; + if (typeof opts.filter === 'function' || isArray(opts.filter)) { + filter = opts.filter; + } + + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = normalizeStringifyOptions(opts); + + var objKeys; + var filter; + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (opts && opts.arrayFormat in arrayPrefixGenerators) { + arrayFormat = opts.arrayFormat; + } else if (opts && 'indices' in opts) { + arrayFormat = opts.indices ? 
'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (options.sort) { + objKeys.sort(options.sort); + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (options.skipNulls && obj[key] === null) { + continue; + } + pushToArray(keys, stringify( + obj[key], + key, + generateArrayPrefix, + options.strictNullHandling, + options.skipNulls, + options.encode ? options.encoder : null, + options.filter, + options.sort, + options.allowDots, + options.serializeDate, + options.formatter, + options.encodeValuesOnly, + options.charset + )); + } + + var joined = keys.join(options.delimiter); + var prefix = options.addQueryPrefix === true ? '?' : ''; + + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark + prefix += 'utf8=%26%2310003%3B&'; + } else { + // encodeURIComponent('✓') + prefix += 'utf8=%E2%9C%93&'; + } + } + + return joined.length > 0 ? prefix + joined : ''; +}; diff --git a/node_modules/qs/lib/utils.js b/node_modules/qs/lib/utils.js new file mode 100644 index 00000000..1b219cdd --- /dev/null +++ b/node_modules/qs/lib/utils.js @@ -0,0 +1,230 @@ +'use strict'; + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; + +var hexTable = (function () { + var array = []; + for (var i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } + + return array; +}()); + +var compactQueue = function compactQueue(queue) { + while (queue.length > 1) { + var item = queue.pop(); + var obj = item.obj[item.prop]; + + if (isArray(obj)) { + var compacted = []; + + for (var j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } + + item.obj[item.prop] = compacted; + } + } +}; + +var arrayToObject = function arrayToObject(source, options) { + var obj = options && options.plainObjects ? 
Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +var merge = function merge(target, source, options) { + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (isArray(target)) { + target.push(source); + } else if (target && typeof target === 'object') { + if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) { + target[source] = true; + } + } else { + return [target, source]; + } + + return target; + } + + if (!target || typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (isArray(target) && !isArray(source)) { + mergeTarget = arrayToObject(target, options); + } + + if (isArray(target) && isArray(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + var targetItem = target[i]; + if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { + target[i] = merge(targetItem, item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +var assign = function assignSingleSource(target, source) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +}; + +var decode = function (str, decoder, charset) { + var strWithoutPlus = str.replace(/\+/g, ' '); + if (charset === 'iso-8859-1') { + // unescape never throws, no try...catch needed: + return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); + } + // utf-8 + try { + return decodeURIComponent(strWithoutPlus); + } catch (e) { + return strWithoutPlus; + } +}; + +var encode = function encode(str, defaultEncoder, charset) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = typeof str === 'string' ? str : String(str); + + if (charset === 'iso-8859-1') { + return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { + return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; + }); + } + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D // - + || c === 0x2E // . 
+ || c === 0x5F // _ + || c === 0x7E // ~ + || (c >= 0x30 && c <= 0x39) // 0-9 + || (c >= 0x41 && c <= 0x5A) // a-z + || (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + + hexTable[0x80 | ((c >> 12) & 0x3F)] + + hexTable[0x80 | ((c >> 6) & 0x3F)] + + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +var compact = function compact(value) { + var queue = [{ obj: { o: value }, prop: 'o' }]; + var refs = []; + + for (var i = 0; i < queue.length; ++i) { + var item = queue[i]; + var obj = item.obj[item.prop]; + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + var val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } + + compactQueue(queue); + + return value; +}; + +var isRegExp = function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +var isBuffer = function isBuffer(obj) { + if (!obj || typeof obj !== 'object') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +var combine = function combine(a, b) { + return [].concat(a, b); +}; + +module.exports = { + arrayToObject: arrayToObject, + assign: assign, + combine: combine, + compact: compact, + decode: decode, + encode: encode, + isBuffer: isBuffer, + isRegExp: isRegExp, + merge: merge +}; diff --git a/node_modules/qs/package.json b/node_modules/qs/package.json new file mode 100644 index 00000000..28d98a11 --- /dev/null +++ b/node_modules/qs/package.json @@ -0,0 +1,58 @@ +{ + "name": "qs", + "description": "A querystring parser that supports nesting and arrays, with a depth limit", + "homepage": "https://github.com/ljharb/qs", + "version": "6.7.0", + "repository": { + "type": "git", + "url": "https://github.com/ljharb/qs.git" + }, + "main": "lib/index.js", + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "keywords": [ + "querystring", + "qs", + "query", + "url", + "parse", + "stringify" + ], + "engines": { + "node": ">=0.6" + }, + "dependencies": {}, + "devDependencies": { + "@ljharb/eslint-config": "^13.1.1", + "browserify": "^16.2.3", + "covert": "^1.1.1", + "editorconfig-tools": "^0.1.1", + "eslint": "^5.15.3", + "evalmd": "^0.0.17", + "for-each": "^0.3.3", + "iconv-lite": "^0.4.24", + "mkdirp": "^0.5.1", + "object-inspect": "^1.6.0", + "qs-iconv": "^1.0.4", + "safe-publish-latest": "^1.1.2", + "safer-buffer": "^2.1.2", + "tape": "^4.10.1" + }, + "scripts": { + "prepublish": "safe-publish-latest && npm run dist", + "pretest": "npm run --silent readme && npm run --silent lint", + "test": "npm run --silent coverage", + "tests-only": "node test", + "readme": "evalmd README.md", + "postlint": "editorconfig-tools check * lib/* test/*", + "lint": "eslint lib/*.js test/*.js", + "coverage": "covert test", + "dist": "mkdirp dist && browserify --standalone Qs lib/index.js > dist/qs.js" + }, + "license": "BSD-3-Clause" +} diff 
--git a/node_modules/qs/test/.eslintrc b/node_modules/qs/test/.eslintrc new file mode 100644 index 00000000..9ebbb921 --- /dev/null +++ b/node_modules/qs/test/.eslintrc @@ -0,0 +1,17 @@ +{ + "rules": { + "array-bracket-newline": 0, + "array-element-newline": 0, + "consistent-return": 2, + "function-paren-newline": 0, + "max-lines": 0, + "max-lines-per-function": 0, + "max-nested-callbacks": [2, 3], + "max-statements": 0, + "no-buffer-constructor": 0, + "no-extend-native": 0, + "no-magic-numbers": 0, + "object-curly-newline": 0, + "sort-keys": 0 + } +} diff --git a/node_modules/qs/test/index.js b/node_modules/qs/test/index.js new file mode 100644 index 00000000..5e6bc8fb --- /dev/null +++ b/node_modules/qs/test/index.js @@ -0,0 +1,7 @@ +'use strict'; + +require('./parse'); + +require('./stringify'); + +require('./utils'); diff --git a/node_modules/qs/test/parse.js b/node_modules/qs/test/parse.js new file mode 100644 index 00000000..89677899 --- /dev/null +++ b/node_modules/qs/test/parse.js @@ -0,0 +1,676 @@ +'use strict'; + +var test = require('tape'); +var qs = require('../'); +var utils = require('../lib/utils'); +var iconv = require('iconv-lite'); +var SaferBuffer = require('safer-buffer').Buffer; + +test('parse()', function (t) { + t.test('parses a simple string', function (st) { + st.deepEqual(qs.parse('0=foo'), { 0: 'foo' }); + st.deepEqual(qs.parse('foo=c++'), { foo: 'c ' }); + st.deepEqual(qs.parse('a[>=]=23'), { a: { '>=': '23' } }); + st.deepEqual(qs.parse('a[<=>]==23'), { a: { '<=>': '=23' } }); + st.deepEqual(qs.parse('a[==]=23'), { a: { '==': '23' } }); + st.deepEqual(qs.parse('foo', { strictNullHandling: true }), { foo: null }); + st.deepEqual(qs.parse('foo'), { foo: '' }); + st.deepEqual(qs.parse('foo='), { foo: '' }); + st.deepEqual(qs.parse('foo=bar'), { foo: 'bar' }); + st.deepEqual(qs.parse(' foo = bar = baz '), { ' foo ': ' bar = baz ' }); + st.deepEqual(qs.parse('foo=bar=baz'), { foo: 'bar=baz' }); + st.deepEqual(qs.parse('foo=bar&bar=baz'), { foo: 'bar', bar: 'baz' }); + st.deepEqual(qs.parse('foo2=bar2&baz2='), { foo2: 'bar2', baz2: '' }); + st.deepEqual(qs.parse('foo=bar&baz', { strictNullHandling: true }), { foo: 'bar', baz: null }); + st.deepEqual(qs.parse('foo=bar&baz'), { foo: 'bar', baz: '' }); + st.deepEqual(qs.parse('cht=p3&chd=t:60,40&chs=250x100&chl=Hello|World'), { + cht: 'p3', + chd: 't:60,40', + chs: '250x100', + chl: 'Hello|World' + }); + st.end(); + }); + + t.test('allows enabling dot notation', function (st) { + st.deepEqual(qs.parse('a.b=c'), { 'a.b': 'c' }); + st.deepEqual(qs.parse('a.b=c', { allowDots: true }), { a: { b: 'c' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[b]=c'), { a: { b: 'c' } }, 'parses a single nested string'); + t.deepEqual(qs.parse('a[b][c]=d'), { a: { b: { c: 'd' } } }, 'parses a double nested string'); + t.deepEqual( + qs.parse('a[b][c][d][e][f][g][h]=i'), + { a: { b: { c: { d: { e: { f: { '[g][h]': 'i' } } } } } } }, + 'defaults to a depth of 5' + ); + + t.test('only parses one level when depth = 1', function (st) { + st.deepEqual(qs.parse('a[b][c]=d', { depth: 1 }), { a: { b: { '[c]': 'd' } } }); + st.deepEqual(qs.parse('a[b][c][d]=e', { depth: 1 }), { a: { b: { '[c][d]': 'e' } } }); + st.end(); + }); + + t.deepEqual(qs.parse('a=b&a=c'), { a: ['b', 'c'] }, 'parses a simple array'); + + t.test('parses an explicit array', function (st) { + st.deepEqual(qs.parse('a[]=b'), { a: ['b'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c&a[]=d'), { a: ['b', 'c', 'd'] }); + 
st.end(); + }); + + t.test('parses a mix of simple and explicit arrays', function (st) { + st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[0]=c'), { a: ['b', 'c'] }); + + st.deepEqual(qs.parse('a[1]=b&a=c', { arrayLimit: 20 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c', { arrayLimit: 0 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); + + st.deepEqual(qs.parse('a=b&a[1]=c', { arrayLimit: 20 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[]=c', { arrayLimit: 0 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); + + st.end(); + }); + + t.test('parses a nested array', function (st) { + st.deepEqual(qs.parse('a[b][]=c&a[b][]=d'), { a: { b: ['c', 'd'] } }); + st.deepEqual(qs.parse('a[>=]=25'), { a: { '>=': '25' } }); + st.end(); + }); + + t.test('allows to specify array indices', function (st) { + st.deepEqual(qs.parse('a[1]=c&a[0]=b&a[2]=d'), { a: ['b', 'c', 'd'] }); + st.deepEqual(qs.parse('a[1]=c&a[0]=b'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[1]=c', { arrayLimit: 20 }), { a: ['c'] }); + st.deepEqual(qs.parse('a[1]=c', { arrayLimit: 0 }), { a: { 1: 'c' } }); + st.deepEqual(qs.parse('a[1]=c'), { a: ['c'] }); + st.end(); + }); + + t.test('limits specific array indices to arrayLimit', function (st) { + st.deepEqual(qs.parse('a[20]=a', { arrayLimit: 20 }), { a: ['a'] }); + st.deepEqual(qs.parse('a[21]=a', { arrayLimit: 20 }), { a: { 21: 'a' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[12b]=c'), { a: { '12b': 'c' } }, 'supports keys that begin with a number'); + + t.test('supports encoded = signs', function (st) { + st.deepEqual(qs.parse('he%3Dllo=th%3Dere'), { 'he=llo': 'th=ere' }); + st.end(); + }); + + t.test('is ok with url encoded strings', function (st) { + st.deepEqual(qs.parse('a[b%20c]=d'), { a: { 'b c': 'd' } }); + st.deepEqual(qs.parse('a[b]=c%20d'), { a: { b: 'c d' } }); + st.end(); + }); + + t.test('allows brackets in the value', function (st) { + st.deepEqual(qs.parse('pets=["tobi"]'), { pets: '["tobi"]' }); + st.deepEqual(qs.parse('operators=[">=", "<="]'), { operators: '[">=", "<="]' }); + st.end(); + }); + + t.test('allows empty values', function (st) { + st.deepEqual(qs.parse(''), {}); + st.deepEqual(qs.parse(null), {}); + st.deepEqual(qs.parse(undefined), {}); + st.end(); + }); + + t.test('transforms arrays to objects', function (st) { + st.deepEqual(qs.parse('foo[0]=bar&foo[bad]=baz'), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[0]=bar'), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar'), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo[]=bar&foo[bad]=baz'), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar&foo[]=foo'), { foo: { bad: 'baz', 0: 'bar', 1: 'foo' } }); + st.deepEqual(qs.parse('foo[0][a]=a&foo[0][b]=b&foo[1][a]=aa&foo[1][b]=bb'), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + + st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c', { allowPrototypes: false }), { a: { 0: 'b', t: 'u' } }); + st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c', { allowPrototypes: true }), { a: { 0: 'b', t: 'u', hasOwnProperty: 'c' } }); + st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y', { allowPrototypes: false }), { a: { 0: 'b', x: 'y' } }); + 
st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y', { allowPrototypes: true }), { a: { 0: 'b', hasOwnProperty: 'c', x: 'y' } }); + st.end(); + }); + + t.test('transforms arrays to objects (dot notation)', function (st) { + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad.boo=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: { boo: 'baz' } } }); + st.deepEqual(qs.parse('foo[0][0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [[{ baz: 'bar' }]], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15'], bar: '2' }] }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].baz[1]=16&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15', '16'], bar: '2' }] }); + st.deepEqual(qs.parse('foo.bad=baz&foo[0]=bar', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo[]=bar&foo.bad=baz', { allowDots: true }), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar&foo[]=foo', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar', 1: 'foo' } }); + st.deepEqual(qs.parse('foo[0].a=a&foo[0].b=b&foo[1].a=aa&foo[1].b=bb', { allowDots: true }), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + st.end(); + }); + + t.test('correctly prunes undefined values when converting an array to an object', function (st) { + st.deepEqual(qs.parse('a[2]=b&a[99999999]=c'), { a: { 2: 'b', 99999999: 'c' } }); + st.end(); + }); + + t.test('supports malformed uri characters', function (st) { + st.deepEqual(qs.parse('{%:%}', { strictNullHandling: true }), { '{%:%}': null }); + st.deepEqual(qs.parse('{%:%}='), { '{%:%}': '' }); + st.deepEqual(qs.parse('foo=%:%}'), { foo: '%:%}' }); + st.end(); + }); + + t.test('doesn\'t produce empty keys', function (st) { + st.deepEqual(qs.parse('_r=1&'), { _r: '1' }); + st.end(); + }); + + t.test('cannot access Object prototype', function (st) { + qs.parse('constructor[prototype][bad]=bad'); + qs.parse('bad[constructor][prototype][bad]=bad'); + st.equal(typeof Object.prototype.bad, 'undefined'); + st.end(); + }); + + t.test('parses arrays of objects', function (st) { + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + st.deepEqual(qs.parse('a[0][b]=c'), { a: [{ b: 'c' }] }); + st.end(); + }); + + t.test('allows for empty strings in arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=&a[]=c'), { a: ['b', '', 'c'] }); + + st.deepEqual( + qs.parse('a[0]=b&a[1]&a[2]=c&a[19]=', { strictNullHandling: true, arrayLimit: 20 }), + { a: ['b', null, 'c', ''] }, + 'with arrayLimit 20 + array indices: null then empty string works' + ); + st.deepEqual( + qs.parse('a[]=b&a[]&a[]=c&a[]=', { strictNullHandling: true, arrayLimit: 0 }), + { a: ['b', null, 'c', ''] }, + 'with arrayLimit 0 + array brackets: null then empty string works' + ); + + st.deepEqual( + qs.parse('a[0]=b&a[1]=&a[2]=c&a[19]', { strictNullHandling: true, arrayLimit: 20 }), + { a: ['b', '', 'c', null] }, + 'with arrayLimit 20 + array indices: empty string then null works' + ); + st.deepEqual( + qs.parse('a[]=b&a[]=&a[]=c&a[]', { strictNullHandling: true, arrayLimit: 0 }), + { a: ['b', '', 'c', null] }, + 'with arrayLimit 0 + array brackets: empty string then null works' + ); + + st.deepEqual( + qs.parse('a[]=&a[]=b&a[]=c'), + { a: ['', 'b', 
'c'] }, + 'array brackets: empty strings work' + ); + st.end(); + }); + + t.test('compacts sparse arrays', function (st) { + st.deepEqual(qs.parse('a[10]=1&a[2]=2', { arrayLimit: 20 }), { a: ['2', '1'] }); + st.deepEqual(qs.parse('a[1][b][2][c]=1', { arrayLimit: 20 }), { a: [{ b: [{ c: '1' }] }] }); + st.deepEqual(qs.parse('a[1][2][3][c]=1', { arrayLimit: 20 }), { a: [[[{ c: '1' }]]] }); + st.deepEqual(qs.parse('a[1][2][3][c][1]=1', { arrayLimit: 20 }), { a: [[[{ c: ['1'] }]]] }); + st.end(); + }); + + t.test('parses semi-parsed strings', function (st) { + st.deepEqual(qs.parse({ 'a[b]': 'c' }), { a: { b: 'c' } }); + st.deepEqual(qs.parse({ 'a[b]': 'c', 'a[d]': 'e' }), { a: { b: 'c', d: 'e' } }); + st.end(); + }); + + t.test('parses buffers correctly', function (st) { + var b = SaferBuffer.from('test'); + st.deepEqual(qs.parse({ a: b }), { a: b }); + st.end(); + }); + + t.test('parses jquery-param strings', function (st) { + // readable = 'filter[0][]=int1&filter[0][]==&filter[0][]=77&filter[]=and&filter[2][]=int2&filter[2][]==&filter[2][]=8' + var encoded = 'filter%5B0%5D%5B%5D=int1&filter%5B0%5D%5B%5D=%3D&filter%5B0%5D%5B%5D=77&filter%5B%5D=and&filter%5B2%5D%5B%5D=int2&filter%5B2%5D%5B%5D=%3D&filter%5B2%5D%5B%5D=8'; + var expected = { filter: [['int1', '=', '77'], 'and', ['int2', '=', '8']] }; + st.deepEqual(qs.parse(encoded), expected); + st.end(); + }); + + t.test('continues parsing when no parent is found', function (st) { + st.deepEqual(qs.parse('[]=&a=b'), { 0: '', a: 'b' }); + st.deepEqual(qs.parse('[]&a=b', { strictNullHandling: true }), { 0: null, a: 'b' }); + st.deepEqual(qs.parse('[foo]=bar'), { foo: 'bar' }); + st.end(); + }); + + t.test('does not error when parsing a very long array', function (st) { + var str = 'a[]=a'; + while (Buffer.byteLength(str) < 128 * 1024) { + str = str + '&' + str; + } + + st.doesNotThrow(function () { + qs.parse(str); + }); + + st.end(); + }); + + t.test('should not throw when a native prototype has an enumerable property', function (st) { + Object.prototype.crash = ''; + Array.prototype.crash = ''; + st.doesNotThrow(qs.parse.bind(null, 'a=b')); + st.deepEqual(qs.parse('a=b'), { a: 'b' }); + st.doesNotThrow(qs.parse.bind(null, 'a[][b]=c')); + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + delete Object.prototype.crash; + delete Array.prototype.crash; + st.end(); + }); + + t.test('parses a string with an alternative string delimiter', function (st) { + st.deepEqual(qs.parse('a=b;c=d', { delimiter: ';' }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('parses a string with an alternative RegExp delimiter', function (st) { + st.deepEqual(qs.parse('a=b; c=d', { delimiter: /[;,] */ }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not use non-splittable objects as delimiters', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { delimiter: true }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding parameter limit', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: 1 }), { a: 'b' }); + st.end(); + }); + + t.test('allows setting the parameter limit to Infinity', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: Infinity }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding array limit', function (st) { + st.deepEqual(qs.parse('a[0]=b', { arrayLimit: -1 }), { a: { 0: 'b' } }); + st.deepEqual(qs.parse('a[-1]=b', { arrayLimit: -1 }), { a: { '-1': 'b' } }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayLimit: 0 }), { a: { 0: 'b', 1: 'c' } }); + st.end(); + 
}); + + t.test('allows disabling array parsing', function (st) { + var indices = qs.parse('a[0]=b&a[1]=c', { parseArrays: false }); + st.deepEqual(indices, { a: { 0: 'b', 1: 'c' } }); + st.equal(Array.isArray(indices.a), false, 'parseArrays:false, indices case is not an array'); + + var emptyBrackets = qs.parse('a[]=b', { parseArrays: false }); + st.deepEqual(emptyBrackets, { a: { 0: 'b' } }); + st.equal(Array.isArray(emptyBrackets.a), false, 'parseArrays:false, empty brackets case is not an array'); + + st.end(); + }); + + t.test('allows for query string prefix', function (st) { + st.deepEqual(qs.parse('?foo=bar', { ignoreQueryPrefix: true }), { foo: 'bar' }); + st.deepEqual(qs.parse('foo=bar', { ignoreQueryPrefix: true }), { foo: 'bar' }); + st.deepEqual(qs.parse('?foo=bar', { ignoreQueryPrefix: false }), { '?foo': 'bar' }); + st.end(); + }); + + t.test('parses an object', function (st) { + var input = { + 'user[name]': { 'pop[bob]': 3 }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('parses string with comma as array divider', function (st) { + st.deepEqual(qs.parse('foo=bar,tee', { comma: true }), { foo: ['bar', 'tee'] }); + st.deepEqual(qs.parse('foo[bar]=coffee,tee', { comma: true }), { foo: { bar: ['coffee', 'tee'] } }); + st.deepEqual(qs.parse('foo=', { comma: true }), { foo: '' }); + st.deepEqual(qs.parse('foo', { comma: true }), { foo: '' }); + st.deepEqual(qs.parse('foo', { comma: true, strictNullHandling: true }), { foo: null }); + st.end(); + }); + + t.test('parses an object in dot notation', function (st) { + var input = { + 'user.name': { 'pop[bob]': 3 }, + 'user.email.': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input, { allowDots: true }); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('parses an object and not child values', function (st) { + var input = { + 'user[name]': { 'pop[bob]': { test: 3 } }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': { test: 3 } }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('does not blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var result = qs.parse('a=b&c=d'); + global.Buffer = tempBuffer; + st.deepEqual(result, { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not crash when parsing circular references', function (st) { + var a = {}; + a.b = a; + + var parsed; + + st.doesNotThrow(function () { + parsed = qs.parse({ 'foo[bar]': 'baz', 'foo[baz]': a }); + }); + + st.equal('foo' in parsed, true, 'parsed has "foo" property'); + st.equal('bar' in parsed.foo, true); + st.equal('baz' in parsed.foo, true); + st.equal(parsed.foo.bar, 'baz'); + st.deepEqual(parsed.foo.baz, a); + st.end(); + }); + + t.test('does not crash when parsing deep objects', function (st) { + var parsed; + var str = 'foo'; + + for (var i = 0; i < 5000; i++) { + str += '[p]'; + } + + str += '=bar'; + + st.doesNotThrow(function () { + parsed = qs.parse(str, { depth: 5000 }); + }); + + st.equal('foo' in parsed, true, 'parsed has "foo" property'); + + var depth = 0; + var ref = parsed.foo; + while ((ref = ref.p)) { + depth += 1; + } + + st.equal(depth, 5000, 'parsed is 5000 properties deep'); + + st.end(); + }); + + t.test('parses null objects 
correctly', { skip: !Object.create }, function (st) { + var a = Object.create(null); + a.b = 'c'; + + st.deepEqual(qs.parse(a), { b: 'c' }); + var result = qs.parse({ a: a }); + st.equal('a' in result, true, 'result has "a" property'); + st.deepEqual(result.a, a); + st.end(); + }); + + t.test('parses dates correctly', function (st) { + var now = new Date(); + st.deepEqual(qs.parse({ a: now }), { a: now }); + st.end(); + }); + + t.test('parses regular expressions correctly', function (st) { + var re = /^test$/; + st.deepEqual(qs.parse({ a: re }), { a: re }); + st.end(); + }); + + t.test('does not allow overwriting prototype properties', function (st) { + st.deepEqual(qs.parse('a[hasOwnProperty]=b', { allowPrototypes: false }), {}); + st.deepEqual(qs.parse('hasOwnProperty=b', { allowPrototypes: false }), {}); + + st.deepEqual( + qs.parse('toString', { allowPrototypes: false }), + {}, + 'bare "toString" results in {}' + ); + + st.end(); + }); + + t.test('can allow overwriting prototype properties', function (st) { + st.deepEqual(qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }), { a: { hasOwnProperty: 'b' } }); + st.deepEqual(qs.parse('hasOwnProperty=b', { allowPrototypes: true }), { hasOwnProperty: 'b' }); + + st.deepEqual( + qs.parse('toString', { allowPrototypes: true }), + { toString: '' }, + 'bare "toString" results in { toString: "" }' + ); + + st.end(); + }); + + t.test('params starting with a closing bracket', function (st) { + st.deepEqual(qs.parse(']=toString'), { ']': 'toString' }); + st.deepEqual(qs.parse(']]=toString'), { ']]': 'toString' }); + st.deepEqual(qs.parse(']hello]=toString'), { ']hello]': 'toString' }); + st.end(); + }); + + t.test('params starting with a starting bracket', function (st) { + st.deepEqual(qs.parse('[=toString'), { '[': 'toString' }); + st.deepEqual(qs.parse('[[=toString'), { '[[': 'toString' }); + st.deepEqual(qs.parse('[hello[=toString'), { '[hello[': 'toString' }); + st.end(); + }); + + t.test('add keys to objects', function (st) { + st.deepEqual( + qs.parse('a[b]=c&a=d'), + { a: { b: 'c', d: true } }, + 'can add keys to objects' + ); + + st.deepEqual( + qs.parse('a[b]=c&a=toString'), + { a: { b: 'c' } }, + 'can not overwrite prototype' + ); + + st.deepEqual( + qs.parse('a[b]=c&a=toString', { allowPrototypes: true }), + { a: { b: 'c', toString: true } }, + 'can overwrite prototype with allowPrototypes true' + ); + + st.deepEqual( + qs.parse('a[b]=c&a=toString', { plainObjects: true }), + { a: { b: 'c', toString: true } }, + 'can overwrite prototype with plainObjects true' + ); + + st.end(); + }); + + t.test('can return null objects', { skip: !Object.create }, function (st) { + var expected = Object.create(null); + expected.a = Object.create(null); + expected.a.b = 'c'; + expected.a.hasOwnProperty = 'd'; + st.deepEqual(qs.parse('a[b]=c&a[hasOwnProperty]=d', { plainObjects: true }), expected); + st.deepEqual(qs.parse(null, { plainObjects: true }), Object.create(null)); + var expectedArray = Object.create(null); + expectedArray.a = Object.create(null); + expectedArray.a[0] = 'b'; + expectedArray.a.c = 'd'; + st.deepEqual(qs.parse('a[]=b&a[c]=d', { plainObjects: true }), expectedArray); + st.end(); + }); + + t.test('can parse with custom encoding', function (st) { + st.deepEqual(qs.parse('%8c%a7=%91%e5%8d%e3%95%7b', { + decoder: function (str) { + var reg = /%([0-9A-F]{2})/ig; + var result = []; + var parts = reg.exec(str); + while (parts) { + result.push(parseInt(parts[1], 16)); + parts = reg.exec(str); + } + return 
String(iconv.decode(SaferBuffer.from(result), 'shift_jis')); + } + }), { 県: '大阪府' }); + st.end(); + }); + + t.test('receives the default decoder as a second argument', function (st) { + st.plan(1); + qs.parse('a', { + decoder: function (str, defaultDecoder) { + st.equal(defaultDecoder, utils.decode); + } + }); + st.end(); + }); + + t.test('throws error with wrong decoder', function (st) { + st['throws'](function () { + qs.parse({}, { decoder: 'string' }); + }, new TypeError('Decoder has to be a function.')); + st.end(); + }); + + t.test('does not mutate the options argument', function (st) { + var options = {}; + qs.parse('a[b]=true', options); + st.deepEqual(options, {}); + st.end(); + }); + + t.test('throws if an invalid charset is specified', function (st) { + st['throws'](function () { + qs.parse('a=b', { charset: 'foobar' }); + }, new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined')); + st.end(); + }); + + t.test('parses an iso-8859-1 string if asked to', function (st) { + st.deepEqual(qs.parse('%A2=%BD', { charset: 'iso-8859-1' }), { '¢': '½' }); + st.end(); + }); + + var urlEncodedCheckmarkInUtf8 = '%E2%9C%93'; + var urlEncodedOSlashInUtf8 = '%C3%B8'; + var urlEncodedNumCheckmark = '%26%2310003%3B'; + var urlEncodedNumSmiley = '%26%239786%3B'; + + t.test('prefers an utf-8 charset specified by the utf8 sentinel to a default charset of iso-8859-1', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedCheckmarkInUtf8 + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'iso-8859-1' }), { ø: 'ø' }); + st.end(); + }); + + t.test('prefers an iso-8859-1 charset specified by the utf8 sentinel to a default charset of utf-8', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedNumCheckmark + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'utf-8' }), { 'Ã¸': 'Ã¸' }); + st.end(); + }); + + t.test('does not require the utf8 sentinel to be defined before the parameters whose decoding it affects', function (st) { + st.deepEqual(qs.parse('a=' + urlEncodedOSlashInUtf8 + '&utf8=' + urlEncodedNumCheckmark, { charsetSentinel: true, charset: 'utf-8' }), { a: 'Ã¸' }); + st.end(); + }); + + t.test('should ignore an utf8 sentinel with an unknown value', function (st) { + st.deepEqual(qs.parse('utf8=foo&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'utf-8' }), { ø: 'ø' }); + st.end(); + }); + + t.test('uses the utf8 sentinel to switch to utf-8 when no default charset is given', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedCheckmarkInUtf8 + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true }), { ø: 'ø' }); + st.end(); + }); + + t.test('uses the utf8 sentinel to switch to iso-8859-1 when no default charset is given', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedNumCheckmark + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true }), { 'Ã¸': 'Ã¸' }); + st.end(); + }); + + t.test('interprets numeric entities in iso-8859-1 when `interpretNumericEntities`', function (st) { + st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'iso-8859-1', interpretNumericEntities: true }), { foo: '☺' }); + st.end(); + }); + + t.test('handles a custom decoder returning `null`, in the `iso-8859-1` charset, when `interpretNumericEntities`', function (st) { + st.deepEqual(qs.parse('foo=&bar=' + urlEncodedNumSmiley, { + charset: 'iso-8859-1', + decoder:
function (str, defaultDecoder, charset) { + return str ? defaultDecoder(str, defaultDecoder, charset) : null; + }, + interpretNumericEntities: true + }), { foo: null, bar: '☺' }); + st.end(); + }); + + t.test('does not interpret numeric entities in iso-8859-1 when `interpretNumericEntities` is absent', function (st) { + st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'iso-8859-1' }), { foo: '&#9786;' }); + st.end(); + }); + + t.test('does not interpret numeric entities when the charset is utf-8, even when `interpretNumericEntities`', function (st) { + st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'utf-8', interpretNumericEntities: true }), { foo: '&#9786;' }); + st.end(); + }); + + t.test('does not interpret %uXXXX syntax in iso-8859-1 mode', function (st) { + st.deepEqual(qs.parse('%u263A=%u263A', { charset: 'iso-8859-1' }), { '%u263A': '%u263A' }); + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/qs/test/stringify.js b/node_modules/qs/test/stringify.js new file mode 100644 index 00000000..53041c2e --- /dev/null +++ b/node_modules/qs/test/stringify.js @@ -0,0 +1,679 @@ +'use strict'; + +var test = require('tape'); +var qs = require('../'); +var utils = require('../lib/utils'); +var iconv = require('iconv-lite'); +var SaferBuffer = require('safer-buffer').Buffer; + +test('stringify()', function (t) { + t.test('stringifies a querystring object', function (st) { + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: 1 }), 'a=1'); + st.equal(qs.stringify({ a: 1, b: 2 }), 'a=1&b=2'); + st.equal(qs.stringify({ a: 'A_Z' }), 'a=A_Z'); + st.equal(qs.stringify({ a: '€' }), 'a=%E2%82%AC'); + st.equal(qs.stringify({ a: '' }), 'a=%EE%80%80'); + st.equal(qs.stringify({ a: 'א' }), 'a=%D7%90'); + st.equal(qs.stringify({ a: '𐐷' }), 'a=%F0%90%90%B7'); + st.end(); + }); + + t.test('stringifies falsy values', function (st) { + st.equal(qs.stringify(undefined), ''); + st.equal(qs.stringify(null), ''); + st.equal(qs.stringify(null, { strictNullHandling: true }), ''); + st.equal(qs.stringify(false), ''); + st.equal(qs.stringify(0), ''); + st.end(); + }); + + t.test('adds query prefix', function (st) { + st.equal(qs.stringify({ a: 'b' }, { addQueryPrefix: true }), '?a=b'); + st.end(); + }); + + t.test('with query prefix, outputs blank string given an empty object', function (st) { + st.equal(qs.stringify({}, { addQueryPrefix: true }), ''); + st.end(); + }); + + t.test('stringifies nested falsy values', function (st) { + st.equal(qs.stringify({ a: { b: { c: null } } }), 'a%5Bb%5D%5Bc%5D='); + st.equal(qs.stringify({ a: { b: { c: null } } }, { strictNullHandling: true }), 'a%5Bb%5D%5Bc%5D'); + st.equal(qs.stringify({ a: { b: { c: false } } }), 'a%5Bb%5D%5Bc%5D=false'); + st.end(); + }); + + t.test('stringifies a nested object', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }), 'a%5Bb%5D%5Bc%5D%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies a nested object with dots notation', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }, { allowDots: true }), 'a.b=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }, { allowDots: true }), 'a.b.c.d=e'); + st.end(); + }); + + t.test('stringifies an array value', function (st) { + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'indices' }), + 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', + 'indices => indices' + ); + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'brackets' }), +
'a%5B%5D=b&a%5B%5D=c&a%5B%5D=d', + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'comma' }), + 'a=b%2Cc%2Cd', + 'comma => comma' + ); + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }), + 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', + 'default => indices' + ); + st.end(); + }); + + t.test('omits nulls when asked', function (st) { + st.equal(qs.stringify({ a: 'b', c: null }, { skipNulls: true }), 'a=b'); + st.end(); + }); + + t.test('omits nested nulls when asked', function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: null } }, { skipNulls: true }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('omits array indices when asked', function (st) { + st.equal(qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }), 'a=b&a=c&a=d'); + st.end(); + }); + + t.test('stringifies a nested array value', function (st) { + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'indices' }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d'); + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'brackets' }), 'a%5Bb%5D%5B%5D=c&a%5Bb%5D%5B%5D=d'); + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'comma' }), 'a%5Bb%5D=c%2Cd'); // a[b]=c,d + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d'); + st.end(); + }); + + t.test('stringifies a nested array value with dots notation', function (st) { + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encode: false, arrayFormat: 'indices' } + ), + 'a.b[0]=c&a.b[1]=d', + 'indices: stringifies with dots + indices' + ); + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encode: false, arrayFormat: 'brackets' } + ), + 'a.b[]=c&a.b[]=d', + 'brackets: stringifies with dots + brackets' + ); + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encode: false, arrayFormat: 'comma' } + ), + 'a.b=c,d', + 'comma: stringifies with dots + comma' + ); + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encode: false } + ), + 'a.b[0]=c&a.b[1]=d', + 'default: stringifies with dots + indices' + ); + st.end(); + }); + + t.test('stringifies an object inside an array', function (st) { + st.equal( + qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'indices' }), + 'a%5B0%5D%5Bb%5D=c', // a[0][b]=c + 'indices => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'brackets' }), + 'a%5B%5D%5Bb%5D=c', // a[][b]=c + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 'c' }] }), + 'a%5B0%5D%5Bb%5D=c', + 'default => indices' + ); + + st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'indices' }), + 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1', + 'indices => indices' + ); + + st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'brackets' }), + 'a%5B%5D%5Bb%5D%5Bc%5D%5B%5D=1', + 'brackets => brackets' + ); + + st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }), + 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1', + 'default => indices' + ); + + st.end(); + }); + + t.test('stringifies an array with mixed objects and primitives', function (st) { + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false, arrayFormat: 'indices' }), + 'a[0][b]=1&a[1]=2&a[2]=3', + 'indices => indices' + ); + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false, arrayFormat: 'brackets' }), + 'a[][b]=1&a[]=2&a[]=3', + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false }), + 'a[0][b]=1&a[1]=2&a[2]=3', + 'default 
=> indices' + ); + + st.end(); + }); + + t.test('stringifies an object inside an array with dots notation', function (st) { + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false, arrayFormat: 'indices' } + ), + 'a[0].b=c', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false, arrayFormat: 'brackets' } + ), + 'a[].b=c', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false } + ), + 'a[0].b=c', + 'default => indices' + ); + + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false, arrayFormat: 'indices' } + ), + 'a[0].b.c[0]=1', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false, arrayFormat: 'brackets' } + ), + 'a[].b.c[]=1', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false } + ), + 'a[0].b.c[0]=1', + 'default => indices' + ); + + st.end(); + }); + + t.test('does not omit object keys when indices = false', function (st) { + st.equal(qs.stringify({ a: [{ b: 'c' }] }, { indices: false }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when indices=true', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { indices: true }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat is specified', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat=indices', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses repeat notation for arrays when no arrayFormat=repeat', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }), 'a=b&a=c'); + st.end(); + }); + + t.test('uses brackets notation for arrays when no arrayFormat=brackets', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }), 'a%5B%5D=b&a%5B%5D=c'); + st.end(); + }); + + t.test('stringifies a complicated object', function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: 'e' } }), 'a%5Bb%5D=c&a%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies an empty value', function (st) { + st.equal(qs.stringify({ a: '' }), 'a='); + st.equal(qs.stringify({ a: null }, { strictNullHandling: true }), 'a'); + + st.equal(qs.stringify({ a: '', b: '' }), 'a=&b='); + st.equal(qs.stringify({ a: null, b: '' }, { strictNullHandling: true }), 'a&b='); + + st.equal(qs.stringify({ a: { b: '' } }), 'a%5Bb%5D='); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: true }), 'a%5Bb%5D'); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: false }), 'a%5Bb%5D='); + + st.end(); + }); + + t.test('stringifies a null object', { skip: !Object.create }, function (st) { + var obj = Object.create(null); + obj.a = 'b'; + st.equal(qs.stringify(obj), 'a=b'); + st.end(); + }); + + t.test('returns an empty string for invalid input', function (st) { + st.equal(qs.stringify(undefined), ''); + st.equal(qs.stringify(false), ''); + st.equal(qs.stringify(null), ''); + st.equal(qs.stringify(''), ''); + st.end(); + }); + + t.test('stringifies an object with a null object as a child', { skip: !Object.create }, function (st) { + var obj = { a: Object.create(null) }; + + 
obj.a.b = 'c'; + st.equal(qs.stringify(obj), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('drops keys with a value of undefined', function (st) { + st.equal(qs.stringify({ a: undefined }), ''); + + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: true }), 'a%5Bc%5D'); + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: false }), 'a%5Bc%5D='); + st.equal(qs.stringify({ a: { b: undefined, c: '' } }), 'a%5Bc%5D='); + st.end(); + }); + + t.test('url encodes values', function (st) { + st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); + st.end(); + }); + + t.test('stringifies a date', function (st) { + var now = new Date(); + var str = 'a=' + encodeURIComponent(now.toISOString()); + st.equal(qs.stringify({ a: now }), str); + st.end(); + }); + + t.test('stringifies the weird object from qs', function (st) { + st.equal(qs.stringify({ 'my weird field': '~q1!2"\'w$5&7/z8)?' }), 'my%20weird%20field=~q1%212%22%27w%245%267%2Fz8%29%3F'); + st.end(); + }); + + t.test('skips properties that are part of the object prototype', function (st) { + Object.prototype.crash = 'test'; + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + delete Object.prototype.crash; + st.end(); + }); + + t.test('stringifies boolean values', function (st) { + st.equal(qs.stringify({ a: true }), 'a=true'); + st.equal(qs.stringify({ a: { b: true } }), 'a%5Bb%5D=true'); + st.equal(qs.stringify({ b: false }), 'b=false'); + st.equal(qs.stringify({ b: { c: false } }), 'b%5Bc%5D=false'); + st.end(); + }); + + t.test('stringifies buffer values', function (st) { + st.equal(qs.stringify({ a: SaferBuffer.from('test') }), 'a=test'); + st.equal(qs.stringify({ a: { b: SaferBuffer.from('test') } }), 'a%5Bb%5D=test'); + st.end(); + }); + + t.test('stringifies an object using an alternative delimiter', function (st) { + st.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); + st.end(); + }); + + t.test('doesn\'t blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var result = qs.stringify({ a: 'b', c: 'd' }); + global.Buffer = tempBuffer; + st.equal(result, 'a=b&c=d'); + st.end(); + }); + + t.test('selects properties when filter=array', function (st) { + st.equal(qs.stringify({ a: 'b' }, { filter: ['a'] }), 'a=b'); + st.equal(qs.stringify({ a: 1 }, { filter: [] }), ''); + + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2], arrayFormat: 'indices' } + ), + 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2], arrayFormat: 'brackets' } + ), + 'a%5Bb%5D%5B%5D=1&a%5Bb%5D%5B%5D=3', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2] } + ), + 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', + 'default => indices' + ); + + st.end(); + }); + + t.test('supports custom representations when filter=function', function (st) { + var calls = 0; + var obj = { a: 'b', c: 'd', e: { f: new Date(1257894000000) } }; + var filterFunc = function (prefix, value) { + calls += 1; + if (calls === 1) { + st.equal(prefix, '', 'prefix is empty'); + st.equal(value, obj); + } else if (prefix === 'c') { + return void 0; + } else if (value instanceof Date) { + st.equal(prefix, 'e[f]'); + return value.getTime(); + } + return value; + }; + + 
st.equal(qs.stringify(obj, { filter: filterFunc }), 'a=b&e%5Bf%5D=1257894000000'); + st.equal(calls, 5); + st.end(); + }); + + t.test('can disable uri encoding', function (st) { + st.equal(qs.stringify({ a: 'b' }, { encode: false }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }, { encode: false }), 'a[b]=c'); + st.equal(qs.stringify({ a: 'b', c: null }, { strictNullHandling: true, encode: false }), 'a=b&c'); + st.end(); + }); + + t.test('can sort the keys', function (st) { + var sort = function (a, b) { + return a.localeCompare(b); + }; + st.equal(qs.stringify({ a: 'c', z: 'y', b: 'f' }, { sort: sort }), 'a=c&b=f&z=y'); + st.equal(qs.stringify({ a: 'c', z: { j: 'a', i: 'b' }, b: 'f' }, { sort: sort }), 'a=c&b=f&z%5Bi%5D=b&z%5Bj%5D=a'); + st.end(); + }); + + t.test('can sort the keys at depth 3 or more too', function (st) { + var sort = function (a, b) { + return a.localeCompare(b); + }; + st.equal( + qs.stringify( + { a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, + { sort: sort, encode: false } + ), + 'a=a&b=b&z[zi][zia]=zia&z[zi][zib]=zib&z[zj][zja]=zja&z[zj][zjb]=zjb' + ); + st.equal( + qs.stringify( + { a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, + { sort: null, encode: false } + ), + 'a=a&z[zj][zjb]=zjb&z[zj][zja]=zja&z[zi][zib]=zib&z[zi][zia]=zia&b=b' + ); + st.end(); + }); + + t.test('can stringify with custom encoding', function (st) { + st.equal(qs.stringify({ 県: '大阪府', '': '' }, { + encoder: function (str) { + if (str.length === 0) { + return ''; + } + var buf = iconv.encode(str, 'shiftjis'); + var result = []; + for (var i = 0; i < buf.length; ++i) { + result.push(buf.readUInt8(i).toString(16)); + } + return '%' + result.join('%'); + } + }), '%8c%a7=%91%e5%8d%e3%95%7b&='); + st.end(); + }); + + t.test('receives the default encoder as a second argument', function (st) { + st.plan(2); + qs.stringify({ a: 1 }, { + encoder: function (str, defaultEncoder) { + st.equal(defaultEncoder, utils.encode); + } + }); + st.end(); + }); + + t.test('throws error with wrong encoder', function (st) { + st['throws'](function () { + qs.stringify({}, { encoder: 'string' }); + }, new TypeError('Encoder has to be a function.')); + st.end(); + }); + + t.test('can use custom encoder for a buffer object', { skip: typeof Buffer === 'undefined' }, function (st) { + st.equal(qs.stringify({ a: SaferBuffer.from([1]) }, { + encoder: function (buffer) { + if (typeof buffer === 'string') { + return buffer; + } + return String.fromCharCode(buffer.readUInt8(0) + 97); + } + }), 'a=b'); + st.end(); + }); + + t.test('serializeDate option', function (st) { + var date = new Date(); + st.equal( + qs.stringify({ a: date }), + 'a=' + date.toISOString().replace(/:/g, '%3A'), + 'default is toISOString' + ); + + var mutatedDate = new Date(); + mutatedDate.toISOString = function () { + throw new SyntaxError(); + }; + st['throws'](function () { + mutatedDate.toISOString(); + }, SyntaxError); + st.equal( + qs.stringify({ a: mutatedDate }), + 'a=' + Date.prototype.toISOString.call(mutatedDate).replace(/:/g, '%3A'), + 'toISOString works even when method is not locally present' + ); + + var specificDate = new Date(6); + st.equal( + qs.stringify( + { a: specificDate }, + { serializeDate: function (d) { return d.getTime() * 7; } } + ), + 'a=42', + 'custom serializeDate function called' + ); + + st.end(); + }); + + t.test('RFC 1738 spaces serialization', function (st) { + st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC1738 }), 
'a=b+c'); + st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC1738 }), 'a+b=c+d'); + st.end(); + }); + + t.test('RFC 3986 spaces serialization', function (st) { + st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC3986 }), 'a=b%20c'); + st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC3986 }), 'a%20b=c%20d'); + st.end(); + }); + + t.test('Backward compatibility to RFC 3986', function (st) { + st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); + st.end(); + }); + + t.test('Edge cases and unknown formats', function (st) { + ['UFO1234', false, 1234, null, {}, []].forEach( + function (format) { + st['throws']( + function () { + qs.stringify({ a: 'b c' }, { format: format }); + }, + new TypeError('Unknown format option provided.') + ); + } + ); + st.end(); + }); + + t.test('encodeValuesOnly', function (st) { + st.equal( + qs.stringify( + { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, + { encodeValuesOnly: true } + ), + 'a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h' + ); + st.equal( + qs.stringify( + { a: 'b', c: ['d', 'e'], f: [['g'], ['h']] } + ), + 'a=b&c%5B0%5D=d&c%5B1%5D=e&f%5B0%5D%5B0%5D=g&f%5B1%5D%5B0%5D=h' + ); + st.end(); + }); + + t.test('encodeValuesOnly - strictNullHandling', function (st) { + st.equal( + qs.stringify( + { a: { b: null } }, + { encodeValuesOnly: true, strictNullHandling: true } + ), + 'a[b]' + ); + st.end(); + }); + + t.test('throws if an invalid charset is specified', function (st) { + st['throws'](function () { + qs.stringify({ a: 'b' }, { charset: 'foobar' }); + }, new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined')); + st.end(); + }); + + t.test('respects a charset of iso-8859-1', function (st) { + st.equal(qs.stringify({ æ: 'æ' }, { charset: 'iso-8859-1' }), '%E6=%E6'); + st.end(); + }); + + t.test('encodes unrepresentable chars as numeric entities in iso-8859-1 mode', function (st) { + st.equal(qs.stringify({ a: '☺' }, { charset: 'iso-8859-1' }), 'a=%26%239786%3B'); + st.end(); + }); + + t.test('respects an explicit charset of utf-8 (the default)', function (st) { + st.equal(qs.stringify({ a: 'æ' }, { charset: 'utf-8' }), 'a=%C3%A6'); + st.end(); + }); + + t.test('adds the right sentinel when instructed to and the charset is utf-8', function (st) { + st.equal(qs.stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'utf-8' }), 'utf8=%E2%9C%93&a=%C3%A6'); + st.end(); + }); + + t.test('adds the right sentinel when instructed to and the charset is iso-8859-1', function (st) { + st.equal(qs.stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'iso-8859-1' }), 'utf8=%26%2310003%3B&a=%E6'); + st.end(); + }); + + t.test('does not mutate the options argument', function (st) { + var options = {}; + qs.stringify({}, options); + st.deepEqual(options, {}); + st.end(); + }); + + t.test('strictNullHandling works with custom filter', function (st) { + var filter = function (prefix, value) { + return value; + }; + + var options = { strictNullHandling: true, filter: filter }; + st.equal(qs.stringify({ key: null }, options), 'key'); + st.end(); + }); + + t.test('strictNullHandling works with null serializeDate', function (st) { + var serializeDate = function () { + return null; + }; + var options = { strictNullHandling: true, serializeDate: serializeDate }; + var date = new Date(); + st.equal(qs.stringify({ key: date }, options), 'key'); + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/qs/test/utils.js b/node_modules/qs/test/utils.js new file mode 100644 index 00000000..da31ce53 --- /dev/null +++ 
b/node_modules/qs/test/utils.js @@ -0,0 +1,136 @@ +'use strict'; + +var test = require('tape'); +var inspect = require('object-inspect'); +var SaferBuffer = require('safer-buffer').Buffer; +var forEach = require('for-each'); +var utils = require('../lib/utils'); + +test('merge()', function (t) { + t.deepEqual(utils.merge(null, true), [null, true], 'merges true into null'); + + t.deepEqual(utils.merge(null, [42]), [null, 42], 'merges null into an array'); + + t.deepEqual(utils.merge({ a: 'b' }, { a: 'c' }), { a: ['b', 'c'] }, 'merges two objects with the same key'); + + var oneMerged = utils.merge({ foo: 'bar' }, { foo: { first: '123' } }); + t.deepEqual(oneMerged, { foo: ['bar', { first: '123' }] }, 'merges a standalone and an object into an array'); + + var twoMerged = utils.merge({ foo: ['bar', { first: '123' }] }, { foo: { second: '456' } }); + t.deepEqual(twoMerged, { foo: { 0: 'bar', 1: { first: '123' }, second: '456' } }, 'merges a standalone and two objects into an array'); + + var sandwiched = utils.merge({ foo: ['bar', { first: '123', second: '456' }] }, { foo: 'baz' }); + t.deepEqual(sandwiched, { foo: ['bar', { first: '123', second: '456' }, 'baz'] }, 'merges an object sandwiched by two standalones into an array'); + + var nestedArrays = utils.merge({ foo: ['baz'] }, { foo: ['bar', 'xyzzy'] }); + t.deepEqual(nestedArrays, { foo: ['baz', 'bar', 'xyzzy'] }); + + var noOptionsNonObjectSource = utils.merge({ foo: 'baz' }, 'bar'); + t.deepEqual(noOptionsNonObjectSource, { foo: 'baz', bar: true }); + + t.test( + 'avoids invoking array setters unnecessarily', + { skip: typeof Object.defineProperty !== 'function' }, + function (st) { + var setCount = 0; + var getCount = 0; + var observed = []; + Object.defineProperty(observed, 0, { + get: function () { + getCount += 1; + return { bar: 'baz' }; + }, + set: function () { setCount += 1; } + }); + utils.merge(observed, [null]); + st.equal(setCount, 0); + st.equal(getCount, 1); + observed[0] = observed[0]; // eslint-disable-line no-self-assign + st.equal(setCount, 1); + st.equal(getCount, 2); + st.end(); + } + ); + + t.end(); +}); + +test('assign()', function (t) { + var target = { a: 1, b: 2 }; + var source = { b: 3, c: 4 }; + var result = utils.assign(target, source); + + t.equal(result, target, 'returns the target'); + t.deepEqual(target, { a: 1, b: 3, c: 4 }, 'target and source are merged'); + t.deepEqual(source, { b: 3, c: 4 }, 'source is untouched'); + + t.end(); +}); + +test('combine()', function (t) { + t.test('both arrays', function (st) { + var a = [1]; + var b = [2]; + var combined = utils.combine(a, b); + + st.deepEqual(a, [1], 'a is not mutated'); + st.deepEqual(b, [2], 'b is not mutated'); + st.notEqual(a, combined, 'a !== combined'); + st.notEqual(b, combined, 'b !== combined'); + st.deepEqual(combined, [1, 2], 'combined is a + b'); + + st.end(); + }); + + t.test('one array, one non-array', function (st) { + var aN = 1; + var a = [aN]; + var bN = 2; + var b = [bN]; + + var combinedAnB = utils.combine(aN, b); + st.deepEqual(b, [bN], 'b is not mutated'); + st.notEqual(aN, combinedAnB, 'aN + b !== aN'); + st.notEqual(a, combinedAnB, 'aN + b !== a'); + st.notEqual(bN, combinedAnB, 'aN + b !== bN'); + st.notEqual(b, combinedAnB, 'aN + b !== b'); + st.deepEqual([1, 2], combinedAnB, 'first argument is array-wrapped when not an array'); + + var combinedABn = utils.combine(a, bN); + st.deepEqual(a, [aN], 'a is not mutated'); + st.notEqual(aN, combinedABn, 'a + bN !== aN'); + st.notEqual(a, combinedABn, 'a + bN !== a'); + 
st.notEqual(bN, combinedABn, 'a + bN !== bN'); + st.notEqual(b, combinedABn, 'a + bN !== b'); + st.deepEqual([1, 2], combinedABn, 'second argument is array-wrapped when not an array'); + + st.end(); + }); + + t.test('neither is an array', function (st) { + var combined = utils.combine(1, 2); + st.notEqual(1, combined, '1 + 2 !== 1'); + st.notEqual(2, combined, '1 + 2 !== 2'); + st.deepEqual([1, 2], combined, 'both arguments are array-wrapped when not an array'); + + st.end(); + }); + + t.end(); +}); + +test('isBuffer()', function (t) { + forEach([null, undefined, true, false, '', 'abc', 42, 0, NaN, {}, [], function () {}, /a/g], function (x) { + t.equal(utils.isBuffer(x), false, inspect(x) + ' is not a buffer'); + }); + + var fakeBuffer = { constructor: Buffer }; + t.equal(utils.isBuffer(fakeBuffer), false, 'fake buffer is not a buffer'); + + var saferBuffer = SaferBuffer.from('abc'); + t.equal(utils.isBuffer(saferBuffer), true, 'SaferBuffer instance is a buffer'); + + var buffer = Buffer.from ? Buffer.from('abc') : new Buffer('abc'); + t.equal(utils.isBuffer(buffer), true, 'real Buffer instance is a buffer'); + t.end(); +}); diff --git a/node_modules/range-parser/HISTORY.md b/node_modules/range-parser/HISTORY.md new file mode 100644 index 00000000..70a973d8 --- /dev/null +++ b/node_modules/range-parser/HISTORY.md @@ -0,0 +1,56 @@ +1.2.1 / 2019-05-10 +================== + + * Improve error when `str` is not a string + +1.2.0 / 2016-06-01 +================== + + * Add `combine` option to combine overlapping ranges + +1.1.0 / 2016-05-13 +================== + + * Fix incorrectly returning -1 when there is at least one valid range + * perf: remove internal function + +1.0.3 / 2015-10-29 +================== + + * perf: enable strict mode + +1.0.2 / 2014-09-08 +================== + + * Support Node.js 0.6 + +1.0.1 / 2014-09-07 +================== + + * Move repository to jshttp + +1.0.0 / 2013-12-11 +================== + + * Add repository to package.json + * Add MIT license + +0.0.4 / 2012-06-17 +================== + + * Change ret -1 for unsatisfiable and -2 when invalid + +0.0.3 / 2012-06-17 +================== + + * Fix last-byte-pos default to len - 1 + +0.0.2 / 2012-06-14 +================== + + * Add `.type` + +0.0.1 / 2012-06-11 +================== + + * Initial release diff --git a/node_modules/range-parser/LICENSE b/node_modules/range-parser/LICENSE new file mode 100644 index 00000000..35999543 --- /dev/null +++ b/node_modules/range-parser/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2012-2014 TJ Holowaychuk +Copyright (c) 2015-2016 Douglas Christopher Wilson + +```js +var parseRange = require('range-parser') +``` + +### parseRange(size, header, options) + +Parse the given `header` string where `size` is the maximum size of the resource. +An array of ranges will be returned or negative numbers indicating an error parsing. + + * `-2` signals a malformed header string + * `-1` signals an unsatisfiable range + + + +```js +// parse header from request +var range = parseRange(size, req.headers.range) + +// the type of the range +if (range.type === 'bytes') { + // the ranges + range.forEach(function (r) { + // do something with r.start and r.end + }) +} +``` + +#### Options + +These properties are accepted in the options object. + +##### combine + +Specifies if overlapping & adjacent ranges should be combined, defaults to `false`. +When `true`, ranges will be combined and returned as if they were specified that +way in the header. 
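A minimal sketch of using `combine` together with the negative return values described above, assuming an ordinary HTTP request handler (the `handleRange` helper and the 400/416 status codes are illustrative choices, not part of the upstream README); the README's own shorter example follows.

```js
var parseRange = require('range-parser')

function handleRange (req, res, size) {
  if (typeof req.headers.range !== 'string') {
    return // no Range header present; range-parser throws on non-strings
  }

  // combine: true merges overlapping and adjacent ranges before they are returned
  var range = parseRange(size, req.headers.range, { combine: true })

  if (range === -2) {
    res.statusCode = 400 // malformed Range header
    return res.end()
  } else if (range === -1) {
    res.statusCode = 416 // no satisfiable ranges for this resource size
    return res.end()
  } else if (range.type === 'bytes') {
    range.forEach(function (r) {
      // serve bytes r.start through r.end of the resource
    })
  }
}
```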
+ + + +```js +parseRange(100, 'bytes=50-55,0-10,5-10,56-60', { combine: true }) +// => [ +// { start: 0, end: 10 }, +// { start: 50, end: 60 } +// ] +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/range-parser/master +[coveralls-url]: https://coveralls.io/r/jshttp/range-parser?branch=master +[node-image]: https://badgen.net/npm/node/range-parser +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/range-parser +[npm-url]: https://npmjs.org/package/range-parser +[npm-version-image]: https://badgen.net/npm/v/range-parser +[travis-image]: https://badgen.net/travis/jshttp/range-parser/master +[travis-url]: https://travis-ci.org/jshttp/range-parser diff --git a/node_modules/range-parser/index.js b/node_modules/range-parser/index.js new file mode 100644 index 00000000..b7dc5c0f --- /dev/null +++ b/node_modules/range-parser/index.js @@ -0,0 +1,162 @@ +/*! + * range-parser + * Copyright(c) 2012-2014 TJ Holowaychuk + * Copyright(c) 2015-2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = rangeParser + +/** + * Parse "Range" header `str` relative to the given file `size`. + * + * @param {Number} size + * @param {String} str + * @param {Object} [options] + * @return {Array} + * @public + */ + +function rangeParser (size, str, options) { + if (typeof str !== 'string') { + throw new TypeError('argument str must be a string') + } + + var index = str.indexOf('=') + + if (index === -1) { + return -2 + } + + // split the range string + var arr = str.slice(index + 1).split(',') + var ranges = [] + + // add ranges type + ranges.type = str.slice(0, index) + + // parse all ranges + for (var i = 0; i < arr.length; i++) { + var range = arr[i].split('-') + var start = parseInt(range[0], 10) + var end = parseInt(range[1], 10) + + // -nnn + if (isNaN(start)) { + start = size - end + end = size - 1 + // nnn- + } else if (isNaN(end)) { + end = size - 1 + } + + // limit last-byte-pos to current length + if (end > size - 1) { + end = size - 1 + } + + // invalid or unsatisifiable + if (isNaN(start) || isNaN(end) || start > end || start < 0) { + continue + } + + // add range + ranges.push({ + start: start, + end: end + }) + } + + if (ranges.length < 1) { + // unsatisifiable + return -1 + } + + return options && options.combine + ? combineRanges(ranges) + : ranges +} + +/** + * Combine overlapping & adjacent ranges. + * @private + */ + +function combineRanges (ranges) { + var ordered = ranges.map(mapWithIndex).sort(sortByRangeStart) + + for (var j = 0, i = 1; i < ordered.length; i++) { + var range = ordered[i] + var current = ordered[j] + + if (range.start > current.end + 1) { + // next range + ordered[++j] = range + } else if (range.end > current.end) { + // extend range + current.end = range.end + current.index = Math.min(current.index, range.index) + } + } + + // trim ordered array + ordered.length = j + 1 + + // generate combined range + var combined = ordered.sort(sortByRangeIndex).map(mapWithoutIndex) + + // copy ranges type + combined.type = ranges.type + + return combined +} + +/** + * Map function to add index value to ranges. + * @private + */ + +function mapWithIndex (range, index) { + return { + start: range.start, + end: range.end, + index: index + } +} + +/** + * Map function to remove index value from ranges. 
+ * @private + */ + +function mapWithoutIndex (range) { + return { + start: range.start, + end: range.end + } +} + +/** + * Sort function to sort ranges by index. + * @private + */ + +function sortByRangeIndex (a, b) { + return a.index - b.index +} + +/** + * Sort function to sort ranges by start position. + * @private + */ + +function sortByRangeStart (a, b) { + return a.start - b.start +} diff --git a/node_modules/range-parser/package.json b/node_modules/range-parser/package.json new file mode 100644 index 00000000..abea6d85 --- /dev/null +++ b/node_modules/range-parser/package.json @@ -0,0 +1,44 @@ +{ + "name": "range-parser", + "author": "TJ Holowaychuk (http://tjholowaychuk.com)", + "description": "Range header field string parser", + "version": "1.2.1", + "contributors": [ + "Douglas Christopher Wilson ", + "James Wyatt Cready ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "keywords": [ + "range", + "parser", + "http" + ], + "repository": "jshttp/range-parser", + "devDependencies": { + "deep-equal": "1.0.1", + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-markdown": "1.0.0", + "eslint-plugin-import": "2.17.2", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "mocha": "6.1.4", + "nyc": "14.1.1" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "test-travis": "nyc --reporter=text npm test" + } +} diff --git a/node_modules/raw-body/HISTORY.md b/node_modules/raw-body/HISTORY.md new file mode 100644 index 00000000..88c79fce --- /dev/null +++ b/node_modules/raw-body/HISTORY.md @@ -0,0 +1,270 @@ +2.4.0 / 2019-04-17 +================== + + * deps: bytes@3.1.0 + - Add petabyte (`pb`) support + * deps: http-errors@1.7.2 + - Set constructor name when possible + - deps: setprototypeof@1.1.1 + - deps: statuses@'>= 1.5.0 < 2' + * deps: iconv-lite@0.4.24 + - Added encoding MIK + +2.3.3 / 2018-05-08 +================== + + * deps: http-errors@1.6.3 + - deps: depd@~1.1.2 + - deps: setprototypeof@1.1.0 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.23 + - Fix loading encoding with year appended + - Fix deprecation warnings on Node.js 10+ + +2.3.2 / 2017-09-09 +================== + + * deps: iconv-lite@0.4.19 + - Fix ISO-8859-1 regression + - Update Windows-1255 + +2.3.1 / 2017-09-07 +================== + + * deps: bytes@3.0.0 + * deps: http-errors@1.6.2 + - deps: depd@1.1.1 + * perf: skip buffer decoding on overage chunk + +2.3.0 / 2017-08-04 +================== + + * Add TypeScript definitions + * Use `http-errors` for standard emitted errors + * deps: bytes@2.5.0 + * deps: iconv-lite@0.4.18 + - Add support for React Native + - Add a warning if not loaded as utf-8 + - Fix CESU-8 decoding in Node.js 8 + - Improve speed of ISO-8859-1 encoding + +2.2.0 / 2017-01-02 +================== + + * deps: iconv-lite@0.4.15 + - Added encoding MS-31J + - Added encoding MS-932 + - Added encoding MS-936 + - Added encoding MS-949 + - Added encoding MS-950 + - Fix GBK/GB18030 handling of Euro character + +2.1.7 / 2016-06-19 +================== + + * deps: bytes@2.4.0 + * perf: remove double-cleanup on happy path + +2.1.6 / 2016-03-07 +================== + + * deps: bytes@2.3.0 + - Drop partial bytes on all parsed units + - Fix parsing byte string that looks like hex + +2.1.5 / 
2015-11-30 +================== + + * deps: bytes@2.2.0 + * deps: iconv-lite@0.4.13 + +2.1.4 / 2015-09-27 +================== + + * Fix masking critical errors from `iconv-lite` + * deps: iconv-lite@0.4.12 + - Fix CESU-8 decoding in Node.js 4.x + +2.1.3 / 2015-09-12 +================== + + * Fix sync callback when attaching data listener causes sync read + - Node.js 0.10 compatibility issue + +2.1.2 / 2015-07-05 +================== + + * Fix error stack traces to skip `makeError` + * deps: iconv-lite@0.4.11 + - Add encoding CESU-8 + +2.1.1 / 2015-06-14 +================== + + * Use `unpipe` module for unpiping requests + +2.1.0 / 2015-05-28 +================== + + * deps: iconv-lite@0.4.10 + - Improved UTF-16 endianness detection + - Leading BOM is now removed when decoding + - The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails + +2.0.2 / 2015-05-21 +================== + + * deps: bytes@2.1.0 + - Slight optimizations + +2.0.1 / 2015-05-10 +================== + + * Fix a false-positive when unpiping in Node.js 0.8 + +2.0.0 / 2015-05-08 +================== + + * Return a promise without callback instead of thunk + * deps: bytes@2.0.1 + - units no longer case sensitive when parsing + +1.3.4 / 2015-04-15 +================== + + * Fix hanging callback if request aborts during read + * deps: iconv-lite@0.4.8 + - Add encoding alias UNICODE-1-1-UTF-7 + +1.3.3 / 2015-02-08 +================== + + * deps: iconv-lite@0.4.7 + - Gracefully support enumerables on `Object.prototype` + +1.3.2 / 2015-01-20 +================== + + * deps: iconv-lite@0.4.6 + - Fix rare aliases of single-byte encodings + +1.3.1 / 2014-11-21 +================== + + * deps: iconv-lite@0.4.5 + - Fix Windows-31J and X-SJIS encoding support + +1.3.0 / 2014-07-20 +================== + + * Fully unpipe the stream on error + - Fixes `Cannot switch to old mode now` error on Node.js 0.10+ + +1.2.3 / 2014-07-20 +================== + + * deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + +1.2.2 / 2014-06-19 +================== + + * Send invalid encoding error to callback + +1.2.1 / 2014-06-15 +================== + + * deps: iconv-lite@0.4.3 + - Added encodings UTF-16BE and UTF-16 with BOM + +1.2.0 / 2014-06-13 +================== + + * Passing string as `options` interpreted as encoding + * Support all encodings from `iconv-lite` + +1.1.7 / 2014-06-12 +================== + + * use `string_decoder` module from npm + +1.1.6 / 2014-05-27 +================== + + * check encoding for old streams1 + * support node.js < 0.10.6 + +1.1.5 / 2014-05-14 +================== + + * bump bytes + +1.1.4 / 2014-04-19 +================== + + * allow true as an option + * bump bytes + +1.1.3 / 2014-03-02 +================== + + * fix case when length=null + +1.1.2 / 2013-12-01 +================== + + * be less strict on state.encoding check + +1.1.1 / 2013-11-27 +================== + + * add engines + +1.1.0 / 2013-11-27 +================== + + * add err.statusCode and err.type + * allow for encoding option to be true + * pause the stream instead of dumping on error + * throw if the stream's encoding is set + +1.0.1 / 2013-11-19 +================== + + * dont support streams1, throw if dev set encoding + +1.0.0 / 2013-11-17 +================== + + * rename `expected` option to `length` + +0.2.0 / 2013-11-15 +================== + + * republish + +0.1.1 / 2013-11-15 +================== + + * use bytes + +0.1.0 / 2013-11-11 +================== + + * generator support + +0.0.3 / 2013-10-10 +================== + + * update repo + +0.0.2 
/ 2013-09-14 +================== + + * dump stream on bad headers + * listen to events after defining received and buffers + +0.0.1 / 2013-09-14 +================== + + * Initial release diff --git a/node_modules/raw-body/LICENSE b/node_modules/raw-body/LICENSE new file mode 100644 index 00000000..d695c8fd --- /dev/null +++ b/node_modules/raw-body/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2013-2014 Jonathan Ong +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/raw-body/README.md b/node_modules/raw-body/README.md new file mode 100644 index 00000000..2ce79d27 --- /dev/null +++ b/node_modules/raw-body/README.md @@ -0,0 +1,219 @@ +# raw-body + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] + +Gets the entire buffer of a stream either as a `Buffer` or a string. +Validates the stream's length against an expected length and maximum limit. +Ideal for parsing request bodies. + +## Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install raw-body +``` + +### TypeScript + +This module includes a [TypeScript](https://www.typescriptlang.org/) +declaration file to enable auto complete in compatible editors and type +information for TypeScript projects. This module depends on the Node.js +types, so install `@types/node`: + +```sh +$ npm install @types/node +``` + +## API + + + +```js +var getRawBody = require('raw-body') +``` + +### getRawBody(stream, [options], [callback]) + +**Returns a promise if no callback specified and global `Promise` exists.** + +Options: + +- `length` - The length of the stream. + If the contents of the stream do not add up to this length, + an `400` error code is returned. +- `limit` - The byte limit of the body. + This is the number of bytes or any string format supported by + [bytes](https://www.npmjs.com/package/bytes), + for example `1000`, `'500kb'` or `'3mb'`. + If the body ends up being larger than this limit, + a `413` error code is returned. +- `encoding` - The encoding to use to decode the body into a string. 
+ By default, a `Buffer` instance will be returned when no encoding is specified. + Most likely, you want `utf-8`, so setting `encoding` to `true` will decode as `utf-8`. + You can use any type of encoding supported by [iconv-lite](https://www.npmjs.org/package/iconv-lite#readme). + +You can also pass a string in place of options to just specify the encoding. + +If an error occurs, the stream will be paused, everything unpiped, +and you are responsible for correctly disposing the stream. +For HTTP requests, no handling is required if you send a response. +For streams that use file descriptors, you should `stream.destroy()` or `stream.close()` to prevent leaks. + +## Errors + +This module creates errors depending on the error condition during reading. +The error may be an error from the underlying Node.js implementation, but is +otherwise an error created by this module, which has the following attributes: + + * `limit` - the limit in bytes + * `length` and `expected` - the expected length of the stream + * `received` - the received bytes + * `encoding` - the invalid encoding + * `status` and `statusCode` - the corresponding status code for the error + * `type` - the error type + +### Types + +The errors from this module have a `type` property which allows for the programmatic +determination of the type of error returned. + +#### encoding.unsupported + +This error will occur when the `encoding` option is specified, but the value does +not map to an encoding supported by the [iconv-lite](https://www.npmjs.org/package/iconv-lite#readme) +module. + +#### entity.too.large + +This error will occur when the `limit` option is specified, but the stream has +an entity that is larger. + +#### request.aborted + +This error will occur when the request stream is aborted by the client before +reading the body has finished. + +#### request.size.invalid + +This error will occur when the `length` option is specified, but the stream has +emitted more bytes. + +#### stream.encoding.set + +This error will occur when the given stream has an encoding set on it, making it +a decoded stream. The stream should not have an encoding set and is expected to +emit `Buffer` objects. + +## Examples + +### Simple Express example + +```js +var contentType = require('content-type') +var express = require('express') +var getRawBody = require('raw-body') + +var app = express() + +app.use(function (req, res, next) { + getRawBody(req, { + length: req.headers['content-length'], + limit: '1mb', + encoding: contentType.parse(req).parameters.charset + }, function (err, string) { + if (err) return next(err) + req.text = string + next() + }) +}) + +// now access req.text +``` + +### Simple Koa example + +```js +var contentType = require('content-type') +var getRawBody = require('raw-body') +var koa = require('koa') + +var app = koa() + +app.use(function * (next) { + this.text = yield getRawBody(this.req, { + length: this.req.headers['content-length'], + limit: '1mb', + encoding: contentType.parse(this.req).parameters.charset + }) + yield next +}) + +// now access this.text +``` + +### Using as a promise + +To use this library as a promise, simply omit the `callback` and a promise is +returned, provided that a global `Promise` is defined. 
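On Node.js versions that support `async`/`await`, the returned promise can simply be awaited. A minimal sketch under that assumption (the `1mb` limit and the error handling are illustrative, not prescribed by this module); the README's `.then`/`.catch` example follows.

```js
var getRawBody = require('raw-body')
var http = require('http')

var server = http.createServer(async function (req, res) {
  try {
    var buf = await getRawBody(req, { limit: '1mb' })
    res.statusCode = 200
    res.end(buf.length + ' bytes submitted')
  } catch (err) {
    // err.type and err.statusCode follow the "Errors" section above,
    // e.g. 'entity.too.large' is reported with statusCode 413
    res.statusCode = err.statusCode || 500
    res.end(err.message)
  }
})

server.listen(3000)
```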
+ +```js +var getRawBody = require('raw-body') +var http = require('http') + +var server = http.createServer(function (req, res) { + getRawBody(req) + .then(function (buf) { + res.statusCode = 200 + res.end(buf.length + ' bytes submitted') + }) + .catch(function (err) { + res.statusCode = 500 + res.end(err.message) + }) +}) + +server.listen(3000) +``` + +### Using with TypeScript + +```ts +import * as getRawBody from 'raw-body'; +import * as http from 'http'; + +const server = http.createServer((req, res) => { + getRawBody(req) + .then((buf) => { + res.statusCode = 200; + res.end(buf.length + ' bytes submitted'); + }) + .catch((err) => { + res.statusCode = err.statusCode; + res.end(err.message); + }); +}); + +server.listen(3000); +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/raw-body.svg +[npm-url]: https://npmjs.org/package/raw-body +[node-version-image]: https://img.shields.io/node/v/raw-body.svg +[node-version-url]: https://nodejs.org/en/download/ +[travis-image]: https://img.shields.io/travis/stream-utils/raw-body/master.svg +[travis-url]: https://travis-ci.org/stream-utils/raw-body +[coveralls-image]: https://img.shields.io/coveralls/stream-utils/raw-body/master.svg +[coveralls-url]: https://coveralls.io/r/stream-utils/raw-body?branch=master +[downloads-image]: https://img.shields.io/npm/dm/raw-body.svg +[downloads-url]: https://npmjs.org/package/raw-body diff --git a/node_modules/raw-body/index.d.ts b/node_modules/raw-body/index.d.ts new file mode 100644 index 00000000..dcbbebd4 --- /dev/null +++ b/node_modules/raw-body/index.d.ts @@ -0,0 +1,87 @@ +import { Readable } from 'stream'; + +declare namespace getRawBody { + export type Encoding = string | true; + + export interface Options { + /** + * The expected length of the stream. + */ + length?: number | string | null; + /** + * The byte limit of the body. This is the number of bytes or any string + * format supported by `bytes`, for example `1000`, `'500kb'` or `'3mb'`. + */ + limit?: number | string | null; + /** + * The encoding to use to decode the body into a string. By default, a + * `Buffer` instance will be returned when no encoding is specified. Most + * likely, you want `utf-8`, so setting encoding to `true` will decode as + * `utf-8`. You can use any type of encoding supported by `iconv-lite`. + */ + encoding?: Encoding | null; + } + + export interface RawBodyError extends Error { + /** + * The limit in bytes. + */ + limit?: number; + /** + * The expected length of the stream. + */ + length?: number; + expected?: number; + /** + * The received bytes. + */ + received?: number; + /** + * The encoding. + */ + encoding?: string; + /** + * The corresponding status code for the error. + */ + status: number; + statusCode: number; + /** + * The error type. + */ + type: string; + } +} + +/** + * Gets the entire buffer of a stream either as a `Buffer` or a string. + * Validates the stream's length against an expected length and maximum + * limit. Ideal for parsing request bodies. 
+ */ +declare function getRawBody( + stream: Readable, + callback: (err: getRawBody.RawBodyError, body: Buffer) => void +): void; + +declare function getRawBody( + stream: Readable, + options: (getRawBody.Options & { encoding: getRawBody.Encoding }) | getRawBody.Encoding, + callback: (err: getRawBody.RawBodyError, body: string) => void +): void; + +declare function getRawBody( + stream: Readable, + options: getRawBody.Options, + callback: (err: getRawBody.RawBodyError, body: Buffer) => void +): void; + +declare function getRawBody( + stream: Readable, + options: (getRawBody.Options & { encoding: getRawBody.Encoding }) | getRawBody.Encoding +): Promise; + +declare function getRawBody( + stream: Readable, + options?: getRawBody.Options +): Promise; + +export = getRawBody; diff --git a/node_modules/raw-body/index.js b/node_modules/raw-body/index.js new file mode 100644 index 00000000..7fe81860 --- /dev/null +++ b/node_modules/raw-body/index.js @@ -0,0 +1,286 @@ +/*! + * raw-body + * Copyright(c) 2013-2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var createError = require('http-errors') +var iconv = require('iconv-lite') +var unpipe = require('unpipe') + +/** + * Module exports. + * @public + */ + +module.exports = getRawBody + +/** + * Module variables. + * @private + */ + +var ICONV_ENCODING_MESSAGE_REGEXP = /^Encoding not recognized: / + +/** + * Get the decoder for a given encoding. + * + * @param {string} encoding + * @private + */ + +function getDecoder (encoding) { + if (!encoding) return null + + try { + return iconv.getDecoder(encoding) + } catch (e) { + // error getting decoder + if (!ICONV_ENCODING_MESSAGE_REGEXP.test(e.message)) throw e + + // the encoding was not found + throw createError(415, 'specified encoding unsupported', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } +} + +/** + * Get the raw body of a stream (typically HTTP). + * + * @param {object} stream + * @param {object|string|function} [options] + * @param {function} [callback] + * @public + */ + +function getRawBody (stream, options, callback) { + var done = callback + var opts = options || {} + + if (options === true || typeof options === 'string') { + // short cut for encoding + opts = { + encoding: options + } + } + + if (typeof options === 'function') { + done = options + opts = {} + } + + // validate callback is a function, if provided + if (done !== undefined && typeof done !== 'function') { + throw new TypeError('argument callback must be a function') + } + + // require the callback without promises + if (!done && !global.Promise) { + throw new TypeError('argument callback is required') + } + + // get encoding + var encoding = opts.encoding !== true + ? opts.encoding + : 'utf-8' + + // convert the limit to an integer + var limit = bytes.parse(opts.limit) + + // convert the expected length to an integer + var length = opts.length != null && !isNaN(opts.length) + ? parseInt(opts.length, 10) + : null + + if (done) { + // classic callback style + return readStream(stream, encoding, length, limit, done) + } + + return new Promise(function executor (resolve, reject) { + readStream(stream, encoding, length, limit, function onRead (err, buf) { + if (err) return reject(err) + resolve(buf) + }) + }) +} + +/** + * Halt a stream. 
+ * + * @param {Object} stream + * @private + */ + +function halt (stream) { + // unpipe everything from the stream + unpipe(stream) + + // pause stream + if (typeof stream.pause === 'function') { + stream.pause() + } +} + +/** + * Read the data from the stream. + * + * @param {object} stream + * @param {string} encoding + * @param {number} length + * @param {number} limit + * @param {function} callback + * @public + */ + +function readStream (stream, encoding, length, limit, callback) { + var complete = false + var sync = true + + // check the length and limit options. + // note: we intentionally leave the stream paused, + // so users should handle the stream themselves. + if (limit !== null && length !== null && length > limit) { + return done(createError(413, 'request entity too large', { + expected: length, + length: length, + limit: limit, + type: 'entity.too.large' + })) + } + + // streams1: assert request encoding is buffer. + // streams2+: assert the stream encoding is buffer. + // stream._decoder: streams1 + // state.encoding: streams2 + // state.decoder: streams2, specifically < 0.10.6 + var state = stream._readableState + if (stream._decoder || (state && (state.encoding || state.decoder))) { + // developer error + return done(createError(500, 'stream encoding should not be set', { + type: 'stream.encoding.set' + })) + } + + var received = 0 + var decoder + + try { + decoder = getDecoder(encoding) + } catch (err) { + return done(err) + } + + var buffer = decoder + ? '' + : [] + + // attach listeners + stream.on('aborted', onAborted) + stream.on('close', cleanup) + stream.on('data', onData) + stream.on('end', onEnd) + stream.on('error', onEnd) + + // mark sync section complete + sync = false + + function done () { + var args = new Array(arguments.length) + + // copy arguments + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + + // mark complete + complete = true + + if (sync) { + process.nextTick(invokeCallback) + } else { + invokeCallback() + } + + function invokeCallback () { + cleanup() + + if (args[0]) { + // halt the stream on error + halt(stream) + } + + callback.apply(null, args) + } + } + + function onAborted () { + if (complete) return + + done(createError(400, 'request aborted', { + code: 'ECONNABORTED', + expected: length, + length: length, + received: received, + type: 'request.aborted' + })) + } + + function onData (chunk) { + if (complete) return + + received += chunk.length + + if (limit !== null && received > limit) { + done(createError(413, 'request entity too large', { + limit: limit, + received: received, + type: 'entity.too.large' + })) + } else if (decoder) { + buffer += decoder.write(chunk) + } else { + buffer.push(chunk) + } + } + + function onEnd (err) { + if (complete) return + if (err) return done(err) + + if (length !== null && received !== length) { + done(createError(400, 'request size did not match content length', { + expected: length, + length: length, + received: received, + type: 'request.size.invalid' + })) + } else { + var string = decoder + ? 
buffer + (decoder.end() || '') + : Buffer.concat(buffer) + done(null, string) + } + } + + function cleanup () { + buffer = null + + stream.removeListener('aborted', onAborted) + stream.removeListener('data', onData) + stream.removeListener('end', onEnd) + stream.removeListener('error', onEnd) + stream.removeListener('close', cleanup) + } +} diff --git a/node_modules/raw-body/package.json b/node_modules/raw-body/package.json new file mode 100644 index 00000000..7f1e5af7 --- /dev/null +++ b/node_modules/raw-body/package.json @@ -0,0 +1,48 @@ +{ + "name": "raw-body", + "description": "Get and validate the raw body of a readable stream.", + "version": "2.4.0", + "author": "Jonathan Ong (http://jongleberry.com)", + "contributors": [ + "Douglas Christopher Wilson ", + "Raynos " + ], + "license": "MIT", + "repository": "stream-utils/raw-body", + "dependencies": { + "bytes": "3.1.0", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "devDependencies": { + "bluebird": "3.5.4", + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-markdown": "1.0.0", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "istanbul": "0.4.5", + "mocha": "6.1.3", + "readable-stream": "2.3.6", + "safe-buffer": "5.1.2" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "index.d.ts", + "index.js" + ], + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --trace-deprecation --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --trace-deprecation --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --trace-deprecation --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/rc/LICENSE.APACHE2 b/node_modules/rc/LICENSE.APACHE2 new file mode 100644 index 00000000..6366c047 --- /dev/null +++ b/node_modules/rc/LICENSE.APACHE2 @@ -0,0 +1,15 @@ +Apache License, Version 2.0 + +Copyright (c) 2011 Dominic Tarr + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/node_modules/rc/LICENSE.BSD b/node_modules/rc/LICENSE.BSD new file mode 100644 index 00000000..96bb796a --- /dev/null +++ b/node_modules/rc/LICENSE.BSD @@ -0,0 +1,26 @@ +Copyright (c) 2013, Dominic Tarr +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The views and conclusions contained in the software and documentation are those +of the authors and should not be interpreted as representing official policies, +either expressed or implied, of the FreeBSD Project. diff --git a/node_modules/rc/LICENSE.MIT b/node_modules/rc/LICENSE.MIT new file mode 100644 index 00000000..6eafbd73 --- /dev/null +++ b/node_modules/rc/LICENSE.MIT @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2011 Dominic Tarr + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/rc/README.md b/node_modules/rc/README.md new file mode 100644 index 00000000..e6522e26 --- /dev/null +++ b/node_modules/rc/README.md @@ -0,0 +1,227 @@ +# rc + +The non-configurable configuration loader for lazy people. + +## Usage + +The only option is to pass rc the name of your app, and your default configuration. + +```javascript +var conf = require('rc')(appname, { + //defaults go here. + port: 2468, + + //defaults which are objects will be merged, not replaced + views: { + engine: 'jade' + } +}); +``` + +`rc` will return your configuration options merged with the defaults you specify. +If you pass in a predefined defaults object, it will be mutated: + +```javascript +var conf = {}; +require('rc')(appname, conf); +``` + +If `rc` finds any config files for your app, the returned config object will have +a `configs` array containing their paths: + +```javascript +var appCfg = require('rc')(appname, conf); +appCfg.configs[0] // /etc/appnamerc +appCfg.configs[1] // /home/dominictarr/.config/appname +appCfg.config // same as appCfg.configs[appCfg.configs.length - 1] +``` + +## Standards + +Given your application name (`appname`), rc will look in all the obvious places for configuration. + + * command line arguments, parsed by minimist _(e.g. 
`--foo baz`, also nested: `--foo.bar=baz`)_ + * environment variables prefixed with `${appname}_` + * or use "\_\_" to indicate nested properties
_(e.g. `appname_foo__bar__baz` => `foo.bar.baz`)_ + * if you passed an option `--config file` then from that file + * a local `.${appname}rc` or the first found looking in `./ ../ ../../ ../../../` etc. + * `$HOME/.${appname}rc` + * `$HOME/.${appname}/config` + * `$HOME/.config/${appname}` + * `$HOME/.config/${appname}/config` + * `/etc/${appname}rc` + * `/etc/${appname}/config` + * the defaults object you passed in. + +All configuration sources that were found will be flattened into one object, +so that sources **earlier** in this list override later ones. + + +## Configuration File Formats + +Configuration files (e.g. `.appnamerc`) may be in either [json](http://json.org/example) or [ini](http://en.wikipedia.org/wiki/INI_file) format. **No** file extension (`.json` or `.ini`) should be used. The example configurations below are equivalent: + + +#### Formatted as `ini` + +``` +; You can include comments in `ini` format if you want. + +dependsOn=0.10.0 + + +; `rc` has built-in support for ini sections, see? + +[commands] + www = ./commands/www + console = ./commands/repl + + +; You can even do nested sections + +[generators.options] + engine = ejs + +[generators.modules] + new = generate-new + engine = generate-backend + +``` + +#### Formatted as `json` + +```javascript +{ + // You can even comment your JSON, if you want + "dependsOn": "0.10.0", + "commands": { + "www": "./commands/www", + "console": "./commands/repl" + }, + "generators": { + "options": { + "engine": "ejs" + }, + "modules": { + "new": "generate-new", + "backend": "generate-backend" + } + } +} +``` + +Comments are stripped from JSON config via [strip-json-comments](https://github.com/sindresorhus/strip-json-comments). + +> Since ini and env variables do not have a standard for types, your application needs to be prepared for strings. + +To ensure that string representations of booleans and numbers are always converted into their proper types (especially useful if you intend to do strict `===` comparisons), consider using a module such as [parse-strings-in-object](https://github.com/anselanza/parse-strings-in-object) to wrap the config object returned from rc. + + +## Simple example demonstrating precedence +Assume you have an application like this (notice the hard-coded defaults passed to rc): +``` +const conf = require('rc')('myapp', { + port: 12345, + mode: 'test' +}); + +console.log(JSON.stringify(conf, null, 2)); +``` +You also have a file `config.json`, with these contents: +``` +{ + "port": 9000, + "foo": "from config json", + "something": "else" +} +``` +And a file `.myapprc` in the same folder, with these contents: +``` +{ + "port": "3001", + "foo": "bar" +} +``` +Here is the expected output from various commands: + +`node .` +``` +{ + "port": "3001", + "mode": "test", + "foo": "bar", + "_": [], + "configs": [ + "/Users/stephen/repos/conftest/.myapprc" + ], + "config": "/Users/stephen/repos/conftest/.myapprc" +} +``` +*Default `mode` from the hard-coded object is retained, but port is overridden by the `.myapprc` file (automatically found based on appname match), and `foo` is added.* + + +`node . --foo baz` +``` +{ + "port": "3001", + "mode": "test", + "foo": "baz", + "_": [], + "configs": [ + "/Users/stephen/repos/conftest/.myapprc" + ], + "config": "/Users/stephen/repos/conftest/.myapprc" +} +``` +*Same result as above but `foo` is overridden because command-line arguments take precedence over the `.myapprc` file.* + +`node . 
--foo barbar --config config.json` +``` +{ + "port": 9000, + "mode": "test", + "foo": "barbar", + "something": "else", + "_": [], + "config": "config.json", + "configs": [ + "/Users/stephen/repos/conftest/.myapprc", + "config.json" + ] +} +``` +*Now `port` comes from the specified `config.json` file (overriding the value from `.myapprc`), and the `foo` value is overridden by the command line despite also being specified in the `config.json` file.* + + + +## Advanced Usage + +#### Pass in your own `argv` + +You may pass in your own `argv` as the third argument to `rc`. This is in case you want to [use your own command-line opts parser](https://github.com/dominictarr/rc/pull/12). + +```javascript +require('rc')(appname, defaults, customArgvParser); +``` + +#### Pass in your own parser + +If you have a special need to use a non-standard parser, +you can do so by passing in the parser as the 4th argument. +(Leave the 3rd argument as null to get the default args parser.) + +```javascript +require('rc')(appname, defaults, null, parser); +``` + +This may also be used to enforce a stricter format, +such as accepting only strict, valid JSON. + +## Note on Performance + +`rc` runs `fs.statSync`, so make sure you don't use it in a hot code path (e.g. a request handler). + + +## License + +Multi-licensed under the two-clause BSD License, MIT License, or Apache License, version 2.0 diff --git a/node_modules/rc/browser.js b/node_modules/rc/browser.js new file mode 100644 index 00000000..8c230c5c --- /dev/null +++ b/node_modules/rc/browser.js @@ -0,0 +1,7 @@ + +// when this is loaded into the browser, +// just use the defaults... + +module.exports = function (name, defaults) { + return defaults +} diff --git a/node_modules/rc/cli.js b/node_modules/rc/cli.js new file mode 100755 index 00000000..ab05b607 --- /dev/null +++ b/node_modules/rc/cli.js @@ -0,0 +1,4 @@ +#! /usr/bin/env node +var rc = require('./index') + +console.log(JSON.stringify(rc(process.argv[2]), false, 2)) diff --git a/node_modules/rc/index.js b/node_modules/rc/index.js new file mode 100755 index 00000000..65eb47af --- /dev/null +++ b/node_modules/rc/index.js @@ -0,0 +1,53 @@ +var cc = require('./lib/utils') +var join = require('path').join +var deepExtend = require('deep-extend') +var etc = '/etc' +var win = process.platform === "win32" +var home = win + ? process.env.USERPROFILE + : process.env.HOME + +module.exports = function (name, defaults, argv, parse) { + if('string' !== typeof name) + throw new Error('rc(name): name *must* be string') + if(!argv) + argv = require('minimist')(process.argv.slice(2)) + defaults = ( + 'string' === typeof defaults + ? cc.json(defaults) : defaults + ) || {} + + parse = parse || cc.parse + + var env = cc.env(name + '_') + + var configs = [defaults] + var configFiles = [] + function addConfigFile (file) { + if (configFiles.indexOf(file) >= 0) return + var fileConfig = cc.file(file) + if (fileConfig) { + configs.push(parse(fileConfig)) + configFiles.push(file) + } + } + + // which files do we look at? + if (!win) + [join(etc, name, 'config'), + join(etc, name + 'rc')].forEach(addConfigFile) + if (home) + [join(home, '.config', name, 'config'), + join(home, '.config', name), + join(home, '.' + name, 'config'), + join(home, '.' + name + 'rc')].forEach(addConfigFile) + addConfigFile(cc.find('.'+name+'rc')) + if (env.config) addConfigFile(env.config) + if (argv.config) addConfigFile(argv.config) + + return deepExtend.apply(null, configs.concat([ + env, + argv, + configFiles.length ? 
{configs: configFiles, config: configFiles[configFiles.length - 1]} : undefined, + ])) +} diff --git a/node_modules/rc/lib/utils.js b/node_modules/rc/lib/utils.js new file mode 100644 index 00000000..8b3beffa --- /dev/null +++ b/node_modules/rc/lib/utils.js @@ -0,0 +1,104 @@ +'use strict'; +var fs = require('fs') +var ini = require('ini') +var path = require('path') +var stripJsonComments = require('strip-json-comments') + +var parse = exports.parse = function (content) { + + //if it ends in .json or starts with { then it must be json. + //must be done this way, because ini accepts everything. + //can't just try and parse it and let it throw if it's not ini. + //everything is ini. even json with a syntax error. + + if(/^\s*{/.test(content)) + return JSON.parse(stripJsonComments(content)) + return ini.parse(content) + +} + +var file = exports.file = function () { + var args = [].slice.call(arguments).filter(function (arg) { return arg != null }) + + //path.join breaks if it's a not a string, so just skip this. + for(var i in args) + if('string' !== typeof args[i]) + return + + var file = path.join.apply(null, args) + var content + try { + return fs.readFileSync(file,'utf-8') + } catch (err) { + return + } +} + +var json = exports.json = function () { + var content = file.apply(null, arguments) + return content ? parse(content) : null +} + +var env = exports.env = function (prefix, env) { + env = env || process.env + var obj = {} + var l = prefix.length + for(var k in env) { + if(k.toLowerCase().indexOf(prefix.toLowerCase()) === 0) { + + var keypath = k.substring(l).split('__') + + // Trim empty strings from keypath array + var _emptyStringIndex + while ((_emptyStringIndex=keypath.indexOf('')) > -1) { + keypath.splice(_emptyStringIndex, 1) + } + + var cursor = obj + keypath.forEach(function _buildSubObj(_subkey,i){ + + // (check for _subkey first so we ignore empty strings) + // (check for cursor to avoid assignment to primitive objects) + if (!_subkey || typeof cursor !== 'object') + return + + // If this is the last key, just stuff the value in there + // Assigns actual value from env variable to final key + // (unless it's just an empty string- in that case use the last valid key) + if (i === keypath.length-1) + cursor[_subkey] = env[k] + + + // Build sub-object if nothing already exists at the keypath + if (cursor[_subkey] === undefined) + cursor[_subkey] = {} + + // Increment cursor used to track the object at the current depth + cursor = cursor[_subkey] + + }) + + } + + } + + return obj +} + +var find = exports.find = function () { + var rel = path.join.apply(null, [].slice.call(arguments)) + + function find(start, rel) { + var file = path.join(start, rel) + try { + fs.statSync(file) + return file + } catch (err) { + if(path.dirname(start) !== start) // root + return find(path.dirname(start), rel) + } + } + return find(process.cwd(), rel) +} + + diff --git a/node_modules/rc/node_modules/minimist/.travis.yml b/node_modules/rc/node_modules/minimist/.travis.yml new file mode 100644 index 00000000..74c57bf1 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +node_js: + - "0.8" + - "0.10" + - "0.12" + - "iojs" +before_install: + - npm install -g npm@~1.4.6 diff --git a/node_modules/rc/node_modules/minimist/LICENSE b/node_modules/rc/node_modules/minimist/LICENSE new file mode 100644 index 00000000..ee27ba4b --- /dev/null +++ b/node_modules/rc/node_modules/minimist/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/rc/node_modules/minimist/example/parse.js b/node_modules/rc/node_modules/minimist/example/parse.js new file mode 100644 index 00000000..abff3e8e --- /dev/null +++ b/node_modules/rc/node_modules/minimist/example/parse.js @@ -0,0 +1,2 @@ +var argv = require('../')(process.argv.slice(2)); +console.dir(argv); diff --git a/node_modules/rc/node_modules/minimist/index.js b/node_modules/rc/node_modules/minimist/index.js new file mode 100644 index 00000000..6a0559d5 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/index.js @@ -0,0 +1,236 @@ +module.exports = function (args, opts) { + if (!opts) opts = {}; + + var flags = { bools : {}, strings : {}, unknownFn: null }; + + if (typeof opts['unknown'] === 'function') { + flags.unknownFn = opts['unknown']; + } + + if (typeof opts['boolean'] === 'boolean' && opts['boolean']) { + flags.allBools = true; + } else { + [].concat(opts['boolean']).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + }); + } + + var aliases = {}; + Object.keys(opts.alias || {}).forEach(function (key) { + aliases[key] = [].concat(opts.alias[key]); + aliases[key].forEach(function (x) { + aliases[x] = [key].concat(aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + + [].concat(opts.string).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + if (aliases[key]) { + flags.strings[aliases[key]] = true; + } + }); + + var defaults = opts['default'] || {}; + + var argv = { _ : [] }; + Object.keys(flags.bools).forEach(function (key) { + setArg(key, defaults[key] === undefined ? false : defaults[key]); + }); + + var notFlags = []; + + if (args.indexOf('--') !== -1) { + notFlags = args.slice(args.indexOf('--')+1); + args = args.slice(0, args.indexOf('--')); + } + + function argDefined(key, arg) { + return (flags.allBools && /^--[^=]+$/.test(arg)) || + flags.strings[key] || flags.bools[key] || aliases[key]; + } + + function setArg (key, val, arg) { + if (arg && flags.unknownFn && !argDefined(key, arg)) { + if (flags.unknownFn(arg) === false) return; + } + + var value = !flags.strings[key] && isNumber(val) + ? 
Number(val) : val + ; + setKey(argv, key.split('.'), value); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), value); + }); + } + + function setKey (obj, keys, value) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + if (o[key] === undefined) o[key] = {}; + o = o[key]; + }); + + var key = keys[keys.length - 1]; + if (o[key] === undefined || flags.bools[key] || typeof o[key] === 'boolean') { + o[key] = value; + } + else if (Array.isArray(o[key])) { + o[key].push(value); + } + else { + o[key] = [ o[key], value ]; + } + } + + function aliasIsBoolean(key) { + return aliases[key].some(function (x) { + return flags.bools[x]; + }); + } + + for (var i = 0; i < args.length; i++) { + var arg = args[i]; + + if (/^--.+=/.test(arg)) { + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. See: + // http://stackoverflow.com/a/1068308/13216 + var m = arg.match(/^--([^=]+)=([\s\S]*)$/); + var key = m[1]; + var value = m[2]; + if (flags.bools[key]) { + value = value !== 'false'; + } + setArg(key, value, arg); + } + else if (/^--no-.+/.test(arg)) { + var key = arg.match(/^--no-(.+)/)[1]; + setArg(key, false, arg); + } + else if (/^--.+/.test(arg)) { + var key = arg.match(/^--(.+)/)[1]; + var next = args[i + 1]; + if (next !== undefined && !/^-/.test(next) + && !flags.bools[key] + && !flags.allBools + && (aliases[key] ? !aliasIsBoolean(key) : true)) { + setArg(key, next, arg); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next === 'true', arg); + i++; + } + else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } + else if (/^-[^-]+/.test(arg)) { + var letters = arg.slice(1,-1).split(''); + + var broken = false; + for (var j = 0; j < letters.length; j++) { + var next = arg.slice(j+2); + + if (next === '-') { + setArg(letters[j], next, arg) + continue; + } + + if (/[A-Za-z]/.test(letters[j]) && /=/.test(next)) { + setArg(letters[j], next.split('=')[1], arg); + broken = true; + break; + } + + if (/[A-Za-z]/.test(letters[j]) + && /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) { + setArg(letters[j], next, arg); + broken = true; + break; + } + + if (letters[j+1] && letters[j+1].match(/\W/)) { + setArg(letters[j], arg.slice(j+2), arg); + broken = true; + break; + } + else { + setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg); + } + } + + var key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1]) + && !flags.bools[key] + && (aliases[key] ? !aliasIsBoolean(key) : true)) { + setArg(key, args[i+1], arg); + i++; + } + else if (args[i+1] && /true|false/.test(args[i+1])) { + setArg(key, args[i+1] === 'true', arg); + i++; + } + else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } + } + else { + if (!flags.unknownFn || flags.unknownFn(arg) !== false) { + argv._.push( + flags.strings['_'] || !isNumber(arg) ? 
arg : Number(arg) + ); + } + if (opts.stopEarly) { + argv._.push.apply(argv._, args.slice(i + 1)); + break; + } + } + } + + Object.keys(defaults).forEach(function (key) { + if (!hasKey(argv, key.split('.'))) { + setKey(argv, key.split('.'), defaults[key]); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), defaults[key]); + }); + } + }); + + if (opts['--']) { + argv['--'] = new Array(); + notFlags.forEach(function(key) { + argv['--'].push(key); + }); + } + else { + notFlags.forEach(function(key) { + argv._.push(key); + }); + } + + return argv; +}; + +function hasKey (obj, keys) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + o = (o[key] || {}); + }); + + var key = keys[keys.length - 1]; + return key in o; +} + +function isNumber (x) { + if (typeof x === 'number') return true; + if (/^0x[0-9a-f]+$/i.test(x)) return true; + return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); +} + diff --git a/node_modules/rc/node_modules/minimist/package.json b/node_modules/rc/node_modules/minimist/package.json new file mode 100644 index 00000000..326480cb --- /dev/null +++ b/node_modules/rc/node_modules/minimist/package.json @@ -0,0 +1,45 @@ +{ + "name": "minimist", + "version": "1.2.0", + "description": "parse argument options", + "main": "index.js", + "devDependencies": { + "covert": "^1.0.0", + "tap": "~0.4.0", + "tape": "^3.5.0" + }, + "scripts": { + "test": "tap test/*.js", + "coverage": "covert test/*.js" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/6..latest", + "ff/5", + "firefox/latest", + "chrome/10", + "chrome/latest", + "safari/5.1", + "safari/latest", + "opera/12" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/substack/minimist.git" + }, + "homepage": "https://github.com/substack/minimist", + "keywords": [ + "argv", + "getopt", + "parser", + "optimist" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "license": "MIT" +} diff --git a/node_modules/rc/node_modules/minimist/readme.markdown b/node_modules/rc/node_modules/minimist/readme.markdown new file mode 100644 index 00000000..30a74cf8 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/readme.markdown @@ -0,0 +1,91 @@ +# minimist + +parse argument options + +This module is the guts of optimist's argument parser without all the +fanciful decoration. + +[![browser support](https://ci.testling.com/substack/minimist.png)](http://ci.testling.com/substack/minimist) + +[![build status](https://secure.travis-ci.org/substack/minimist.png)](http://travis-ci.org/substack/minimist) + +# example + +``` js +var argv = require('minimist')(process.argv.slice(2)); +console.dir(argv); +``` + +``` +$ node example/parse.js -a beep -b boop +{ _: [], a: 'beep', b: 'boop' } +``` + +``` +$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz +{ _: [ 'foo', 'bar', 'baz' ], + x: 3, + y: 4, + n: 5, + a: true, + b: true, + c: true, + beep: 'boop' } +``` + +# methods + +``` js +var parseArgs = require('minimist') +``` + +## var argv = parseArgs(args, opts={}) + +Return an argument object `argv` populated with the array arguments from `args`. + +`argv._` contains all the arguments that didn't have an option associated with +them. + +Numeric-looking arguments will be returned as numbers unless `opts.string` or +`opts.boolean` is set for that argument name. + +Any arguments after `'--'` will not be parsed and will end up in `argv._`. 
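+For example, here is a quick sketch of the two behaviours described above (illustrative only; it assumes minimist is installed and required the same way as in the example at the top):
+
+``` js
+var parseArgs = require('minimist');
+
+// '55' is coerced to a number; everything after '--' ends up unparsed in argv._
+console.dir(parseArgs([ '-p', '55', '--name', 'eight', '--', '-x', '9' ]));
+// { _: [ '-x', '9' ], p: 55, name: 'eight' }
+
+// with opts.string the value keeps its string form
+console.dir(parseArgs([ '-p', '55' ], { string: 'p' }));
+// { _: [], p: '55' }
+```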
+ +options can be: + +* `opts.string` - a string or array of strings argument names to always treat as +strings +* `opts.boolean` - a boolean, string or array of strings to always treat as +booleans. if `true` will treat all double hyphenated arguments without equal signs +as boolean (e.g. affects `--foo`, not `-f` or `--foo=bar`) +* `opts.alias` - an object mapping string names to strings or arrays of string +argument names to use as aliases +* `opts.default` - an object mapping string argument names to default values +* `opts.stopEarly` - when true, populate `argv._` with everything after the +first non-option +* `opts['--']` - when true, populate `argv._` with everything before the `--` +and `argv['--']` with everything after the `--`. Here's an example: +* `opts.unknown` - a function which is invoked with a command line parameter not +defined in the `opts` configuration object. If the function returns `false`, the +unknown option is not added to `argv`. + +``` +> require('./')('one two three -- four five --six'.split(' '), { '--': true }) +{ _: [ 'one', 'two', 'three' ], + '--': [ 'four', 'five', '--six' ] } +``` + +Note that with `opts['--']` set, parsing for arguments still stops after the +`--`. + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install minimist +``` + +# license + +MIT diff --git a/node_modules/rc/node_modules/minimist/test/all_bool.js b/node_modules/rc/node_modules/minimist/test/all_bool.js new file mode 100644 index 00000000..ac835483 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/all_bool.js @@ -0,0 +1,32 @@ +var parse = require('../'); +var test = require('tape'); + +test('flag boolean true (default all --args to boolean)', function (t) { + var argv = parse(['moo', '--honk', 'cow'], { + boolean: true + }); + + t.deepEqual(argv, { + honk: true, + _: ['moo', 'cow'] + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); + +test('flag boolean true only affects double hyphen arguments without equals signs', function (t) { + var argv = parse(['moo', '--honk', 'cow', '-p', '55', '--tacos=good'], { + boolean: true + }); + + t.deepEqual(argv, { + honk: true, + tacos: 'good', + p: 55, + _: ['moo', 'cow'] + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/bool.js b/node_modules/rc/node_modules/minimist/test/bool.js new file mode 100644 index 00000000..14b0717c --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/bool.js @@ -0,0 +1,166 @@ +var parse = require('../'); +var test = require('tape'); + +test('flag boolean default false', function (t) { + var argv = parse(['moo'], { + boolean: ['t', 'verbose'], + default: { verbose: false, t: false } + }); + + t.deepEqual(argv, { + verbose: false, + t: false, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); + +}); + +test('boolean groups', function (t) { + var argv = parse([ '-x', '-z', 'one', 'two', 'three' ], { + boolean: ['x','y','z'] + }); + + t.deepEqual(argv, { + x : true, + y : false, + z : true, + _ : [ 'one', 'two', 'three' ] + }); + + t.deepEqual(typeof argv.x, 'boolean'); + t.deepEqual(typeof argv.y, 'boolean'); + t.deepEqual(typeof argv.z, 'boolean'); + t.end(); +}); +test('boolean and alias with chainable api', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + herp: { alias: 'h', boolean: true } + }; + var aliasedArgv = parse(aliased, { + boolean: 'herp', + alias: { h: 
'herp' } + }); + var propertyArgv = parse(regular, { + boolean: 'herp', + alias: { h: 'herp' } + }); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias with options hash', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + alias: { 'h': 'herp' }, + boolean: 'herp' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias array with options hash', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var alt = [ '--harp', 'derp' ]; + var opts = { + alias: { 'h': ['herp', 'harp'] }, + boolean: 'h' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var altPropertyArgv = parse(alt, opts); + var expected = { + harp: true, + herp: true, + h: true, + '_': [ 'derp' ] + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.same(altPropertyArgv, expected); + t.end(); +}); + +test('boolean and alias using explicit true', function (t) { + var aliased = [ '-h', 'true' ]; + var regular = [ '--herp', 'true' ]; + var opts = { + alias: { h: 'herp' }, + boolean: 'h' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +// regression, see https://github.com/substack/node-optimist/issues/71 +test('boolean and --x=true', function(t) { + var parsed = parse(['--boool', '--other=true'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'true'); + + parsed = parse(['--boool', '--other=false'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'false'); + t.end(); +}); + +test('boolean --boool=true', function (t) { + var parsed = parse(['--boool=true'], { + default: { + boool: false + }, + boolean: ['boool'] + }); + + t.same(parsed.boool, true); + t.end(); +}); + +test('boolean --boool=false', function (t) { + var parsed = parse(['--boool=false'], { + default: { + boool: true + }, + boolean: ['boool'] + }); + + t.same(parsed.boool, false); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/dash.js b/node_modules/rc/node_modules/minimist/test/dash.js new file mode 100644 index 00000000..5a4fa5be --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/dash.js @@ -0,0 +1,31 @@ +var parse = require('../'); +var test = require('tape'); + +test('-', function (t) { + t.plan(5); + t.deepEqual(parse([ '-n', '-' ]), { n: '-', _: [] }); + t.deepEqual(parse([ '-' ]), { _: [ '-' ] }); + t.deepEqual(parse([ '-f-' ]), { f: '-', _: [] }); + t.deepEqual( + parse([ '-b', '-' ], { boolean: 'b' }), + { b: true, _: [ '-' ] } + ); + t.deepEqual( + parse([ '-s', '-' ], { string: 's' }), + { s: '-', _: [] } + ); +}); + +test('-a -- b', function (t) { + t.plan(3); + t.deepEqual(parse([ '-a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); +}); + +test('move arguments after the -- into their own `--` array', function(t) { + t.plan(1); + t.deepEqual( + parse([ 
'--name', 'John', 'before', '--', 'after' ], { '--': true }), + { name: 'John', _: [ 'before' ], '--': [ 'after' ] }); +}); diff --git a/node_modules/rc/node_modules/minimist/test/default_bool.js b/node_modules/rc/node_modules/minimist/test/default_bool.js new file mode 100644 index 00000000..780a3112 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/default_bool.js @@ -0,0 +1,35 @@ +var test = require('tape'); +var parse = require('../'); + +test('boolean default true', function (t) { + var argv = parse([], { + boolean: 'sometrue', + default: { sometrue: true } + }); + t.equal(argv.sometrue, true); + t.end(); +}); + +test('boolean default false', function (t) { + var argv = parse([], { + boolean: 'somefalse', + default: { somefalse: false } + }); + t.equal(argv.somefalse, false); + t.end(); +}); + +test('boolean default to null', function (t) { + var argv = parse([], { + boolean: 'maybe', + default: { maybe: null } + }); + t.equal(argv.maybe, null); + var argv = parse(['--maybe'], { + boolean: 'maybe', + default: { maybe: null } + }); + t.equal(argv.maybe, true); + t.end(); + +}) diff --git a/node_modules/rc/node_modules/minimist/test/dotted.js b/node_modules/rc/node_modules/minimist/test/dotted.js new file mode 100644 index 00000000..d8b3e856 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/dotted.js @@ -0,0 +1,22 @@ +var parse = require('../'); +var test = require('tape'); + +test('dotted alias', function (t) { + var argv = parse(['--a.b', '22'], {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 22); + t.equal(argv.aa.bb, 22); + t.end(); +}); + +test('dotted default', function (t) { + var argv = parse('', {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 11); + t.equal(argv.aa.bb, 11); + t.end(); +}); + +test('dotted default with no alias', function (t) { + var argv = parse('', {default: {'a.b': 11}}); + t.equal(argv.a.b, 11); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/kv_short.js b/node_modules/rc/node_modules/minimist/test/kv_short.js new file mode 100644 index 00000000..f813b305 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/kv_short.js @@ -0,0 +1,16 @@ +var parse = require('../'); +var test = require('tape'); + +test('short -k=v' , function (t) { + t.plan(1); + + var argv = parse([ '-b=123' ]); + t.deepEqual(argv, { b: 123, _: [] }); +}); + +test('multi short -k=v' , function (t) { + t.plan(1); + + var argv = parse([ '-a=whatever', '-b=robots' ]); + t.deepEqual(argv, { a: 'whatever', b: 'robots', _: [] }); +}); diff --git a/node_modules/rc/node_modules/minimist/test/long.js b/node_modules/rc/node_modules/minimist/test/long.js new file mode 100644 index 00000000..5d3a1e09 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/long.js @@ -0,0 +1,31 @@ +var test = require('tape'); +var parse = require('../'); + +test('long opts', function (t) { + t.deepEqual( + parse([ '--bool' ]), + { bool : true, _ : [] }, + 'long boolean' + ); + t.deepEqual( + parse([ '--pow', 'xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture sp' + ); + t.deepEqual( + parse([ '--pow=xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture eq' + ); + t.deepEqual( + parse([ '--host', 'localhost', '--port', '555' ]), + { host : 'localhost', port : 555, _ : [] }, + 'long captures sp' + ); + t.deepEqual( + parse([ '--host=localhost', '--port=555' ]), + { host : 'localhost', port : 555, _ : [] }, + 'long captures eq' + ); + t.end(); +}); diff --git 
a/node_modules/rc/node_modules/minimist/test/num.js b/node_modules/rc/node_modules/minimist/test/num.js new file mode 100644 index 00000000..2cc77f4d --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/num.js @@ -0,0 +1,36 @@ +var parse = require('../'); +var test = require('tape'); + +test('nums', function (t) { + var argv = parse([ + '-x', '1234', + '-y', '5.67', + '-z', '1e7', + '-w', '10f', + '--hex', '0xdeadbeef', + '789' + ]); + t.deepEqual(argv, { + x : 1234, + y : 5.67, + z : 1e7, + w : '10f', + hex : 0xdeadbeef, + _ : [ 789 ] + }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv.y, 'number'); + t.deepEqual(typeof argv.z, 'number'); + t.deepEqual(typeof argv.w, 'string'); + t.deepEqual(typeof argv.hex, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); + +test('already a number', function (t) { + var argv = parse([ '-x', 1234, 789 ]); + t.deepEqual(argv, { x : 1234, _ : [ 789 ] }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/parse.js b/node_modules/rc/node_modules/minimist/test/parse.js new file mode 100644 index 00000000..7b4a2a17 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/parse.js @@ -0,0 +1,197 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse args', function (t) { + t.deepEqual( + parse([ '--no-moo' ]), + { moo : false, _ : [] }, + 'no' + ); + t.deepEqual( + parse([ '-v', 'a', '-v', 'b', '-v', 'c' ]), + { v : ['a','b','c'], _ : [] }, + 'multi' + ); + t.end(); +}); + +test('comprehensive', function (t) { + t.deepEqual( + parse([ + '--name=meowmers', 'bare', '-cats', 'woo', + '-h', 'awesome', '--multi=quux', + '--key', 'value', + '-b', '--bool', '--no-meep', '--multi=baz', + '--', '--not-a-flag', 'eek' + ]), + { + c : true, + a : true, + t : true, + s : 'woo', + h : 'awesome', + b : true, + bool : true, + key : 'value', + multi : [ 'quux', 'baz' ], + meep : false, + name : 'meowmers', + _ : [ 'bare', '--not-a-flag', 'eek' ] + } + ); + t.end(); +}); + +test('flag boolean', function (t) { + var argv = parse([ '-t', 'moo' ], { boolean: 't' }); + t.deepEqual(argv, { t : true, _ : [ 'moo' ] }); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('flag boolean value', function (t) { + var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], { + boolean: [ 't', 'verbose' ], + default: { verbose: true } + }); + + t.deepEqual(argv, { + verbose: false, + t: true, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('newlines in params' , function (t) { + var args = parse([ '-s', "X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + + // reproduce in bash: + // VALUE="new + // line" + // node program.js --s="$VALUE" + args = parse([ "--s=X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + t.end(); +}); + +test('strings' , function (t) { + var s = parse([ '-s', '0001234' ], { string: 's' }).s; + t.equal(s, '0001234'); + t.equal(typeof s, 'string'); + + var x = parse([ '-x', '56' ], { string: 'x' }).x; + t.equal(x, '56'); + t.equal(typeof x, 'string'); + t.end(); +}); + +test('stringArgs', function (t) { + var s = parse([ ' ', ' ' ], { string: '_' })._; + t.same(s.length, 2); + t.same(typeof s[0], 'string'); + t.same(s[0], ' '); + t.same(typeof s[1], 'string'); + t.same(s[1], ' '); + t.end(); +}); + +test('empty strings', function(t) { + var s = parse([ '-s' ], { string: 's' }).s; + 
t.equal(s, ''); + t.equal(typeof s, 'string'); + + var str = parse([ '--str' ], { string: 'str' }).str; + t.equal(str, ''); + t.equal(typeof str, 'string'); + + var letters = parse([ '-art' ], { + string: [ 'a', 't' ] + }); + + t.equal(letters.a, ''); + t.equal(letters.r, true); + t.equal(letters.t, ''); + + t.end(); +}); + + +test('string and alias', function(t) { + var x = parse([ '--str', '000123' ], { + string: 's', + alias: { s: 'str' } + }); + + t.equal(x.str, '000123'); + t.equal(typeof x.str, 'string'); + t.equal(x.s, '000123'); + t.equal(typeof x.s, 'string'); + + var y = parse([ '-s', '000123' ], { + string: 'str', + alias: { str: 's' } + }); + + t.equal(y.str, '000123'); + t.equal(typeof y.str, 'string'); + t.equal(y.s, '000123'); + t.equal(typeof y.s, 'string'); + t.end(); +}); + +test('slashBreak', function (t) { + t.same( + parse([ '-I/foo/bar/baz' ]), + { I : '/foo/bar/baz', _ : [] } + ); + t.same( + parse([ '-xyz/foo/bar/baz' ]), + { x : true, y : true, z : '/foo/bar/baz', _ : [] } + ); + t.end(); +}); + +test('alias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: 'zoom' } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.f, 11); + t.end(); +}); + +test('multiAlias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: [ 'zm', 'zoom' ] } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.z, argv.zm); + t.equal(argv.f, 11); + t.end(); +}); + +test('nested dotted objects', function (t) { + var argv = parse([ + '--foo.bar', '3', '--foo.baz', '4', + '--foo.quux.quibble', '5', '--foo.quux.o_O', + '--beep.boop' + ]); + + t.same(argv.foo, { + bar : 3, + baz : 4, + quux : { + quibble : 5, + o_O : true + } + }); + t.same(argv.beep, { boop : true }); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/parse_modified.js b/node_modules/rc/node_modules/minimist/test/parse_modified.js new file mode 100644 index 00000000..ab620dc5 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/parse_modified.js @@ -0,0 +1,9 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse with modifier functions' , function (t) { + t.plan(1); + + var argv = parse([ '-b', '123' ], { boolean: 'b' }); + t.deepEqual(argv, { b: true, _: [123] }); +}); diff --git a/node_modules/rc/node_modules/minimist/test/short.js b/node_modules/rc/node_modules/minimist/test/short.js new file mode 100644 index 00000000..d513a1c2 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/short.js @@ -0,0 +1,67 @@ +var parse = require('../'); +var test = require('tape'); + +test('numeric short args', function (t) { + t.plan(2); + t.deepEqual(parse([ '-n123' ]), { n: 123, _: [] }); + t.deepEqual( + parse([ '-123', '456' ]), + { 1: true, 2: true, 3: 456, _: [] } + ); +}); + +test('short', function (t) { + t.deepEqual( + parse([ '-b' ]), + { b : true, _ : [] }, + 'short boolean' + ); + t.deepEqual( + parse([ 'foo', 'bar', 'baz' ]), + { _ : [ 'foo', 'bar', 'baz' ] }, + 'bare' + ); + t.deepEqual( + parse([ '-cats' ]), + { c : true, a : true, t : true, s : true, _ : [] }, + 'group' + ); + t.deepEqual( + parse([ '-cats', 'meow' ]), + { c : true, a : true, t : true, s : 'meow', _ : [] }, + 'short group next' + ); + t.deepEqual( + parse([ '-h', 'localhost' ]), + { h : 'localhost', _ : [] }, + 'short capture' + ); + t.deepEqual( + parse([ '-h', 'localhost', '-p', '555' ]), + { h : 'localhost', p : 555, _ : [] }, + 'short captures' + ); + t.end(); +}); + +test('mixed short 
bool and capture', function (t) { + t.same( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); + +test('short and long', function (t) { + t.deepEqual( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/stop_early.js b/node_modules/rc/node_modules/minimist/test/stop_early.js new file mode 100644 index 00000000..bdf9fbcb --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/stop_early.js @@ -0,0 +1,15 @@ +var parse = require('../'); +var test = require('tape'); + +test('stops parsing on the first non-option when stopEarly is set', function (t) { + var argv = parse(['--aaa', 'bbb', 'ccc', '--ddd'], { + stopEarly: true + }); + + t.deepEqual(argv, { + aaa: 'bbb', + _: ['ccc', '--ddd'] + }); + + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/unknown.js b/node_modules/rc/node_modules/minimist/test/unknown.js new file mode 100644 index 00000000..462a36bd --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/unknown.js @@ -0,0 +1,102 @@ +var parse = require('../'); +var test = require('tape'); + +test('boolean and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'true', '--derp', 'true' ]; + var regular = [ '--herp', 'true', '-d', 'true' ]; + var opts = { + alias: { h: 'herp' }, + boolean: 'h', + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('flag boolean true any double hyphen argument is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var argv = parse(['--honk', '--tacos=good', 'cow', '-p', '55'], { + boolean: true, + unknown: unknownFn + }); + t.same(unknown, ['--tacos=good', 'cow', '-p']); + t.same(argv, { + honk: true, + _: [] + }); + t.end(); +}); + +test('string and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'hello', '--derp', 'goodbye' ]; + var regular = [ '--herp', 'hello', '-d', 'moon' ]; + var opts = { + alias: { h: 'herp' }, + string: 'h', + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('default and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'hello' ]; + var regular = [ '--herp', 'hello' ]; + var opts = { + default: { 'h': 'bar' }, + alias: { 'h': 'herp' }, + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, []); + t.end(); + unknownFn(); // exercise fn for 100% coverage +}); + +test('value following -- is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '--bad', '--', 'good', 'arg' ]; + var opts = { + '--': true, + unknown: unknownFn + }; + var argv = parse(aliased, opts); + + t.same(unknown, ['--bad']); + t.same(argv, { + '--': ['good', 'arg'], + '_': [] + }) + t.end(); +}); diff --git 
a/node_modules/rc/node_modules/minimist/test/whitespace.js b/node_modules/rc/node_modules/minimist/test/whitespace.js new file mode 100644 index 00000000..8a52a58c --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/whitespace.js @@ -0,0 +1,8 @@ +var parse = require('../'); +var test = require('tape'); + +test('whitespace should be whitespace' , function (t) { + t.plan(1); + var x = parse([ '-x', '\t' ]).x; + t.equal(x, '\t'); +}); diff --git a/node_modules/rc/package.json b/node_modules/rc/package.json new file mode 100644 index 00000000..887238fa --- /dev/null +++ b/node_modules/rc/package.json @@ -0,0 +1,29 @@ +{ + "name": "rc", + "version": "1.2.8", + "description": "hardwired configuration loader", + "main": "index.js", + "browser": "browser.js", + "scripts": { + "test": "set -e; node test/test.js; node test/ini.js; node test/nested-env-vars.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/dominictarr/rc.git" + }, + "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "keywords": [ + "config", + "rc", + "unix", + "defaults" + ], + "bin": "./cli.js", + "author": "Dominic Tarr (dominictarr.com)", + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + } +} diff --git a/node_modules/rc/test/ini.js b/node_modules/rc/test/ini.js new file mode 100644 index 00000000..e6857f8b --- /dev/null +++ b/node_modules/rc/test/ini.js @@ -0,0 +1,16 @@ +var cc =require('../lib/utils') +var INI = require('ini') +var assert = require('assert') + +function test(obj) { + + var _json, _ini + var json = cc.parse (_json = JSON.stringify(obj)) + var ini = cc.parse (_ini = INI.stringify(obj)) + console.log(_ini, _json) + assert.deepEqual(json, ini) +} + + +test({hello: true}) + diff --git a/node_modules/rc/test/nested-env-vars.js b/node_modules/rc/test/nested-env-vars.js new file mode 100644 index 00000000..0ecd1763 --- /dev/null +++ b/node_modules/rc/test/nested-env-vars.js @@ -0,0 +1,50 @@ + +var seed = Math.random(); +var n = 'rc'+ seed; +var N = 'RC'+ seed; +var assert = require('assert') + + +// Basic usage +process.env[n+'_someOpt__a'] = 42 +process.env[n+'_someOpt__x__'] = 99 +process.env[n+'_someOpt__a__b'] = 186 +process.env[n+'_someOpt__a__b__c'] = 243 +process.env[n+'_someOpt__x__y'] = 1862 +process.env[n+'_someOpt__z'] = 186577 + +// Should ignore empty strings from orphaned '__' +process.env[n+'_someOpt__z__x__'] = 18629 +process.env[n+'_someOpt__w__w__'] = 18629 + +// Leading '__' should ignore everything up to 'z' +process.env[n+'___z__i__'] = 9999 + +// should ignore case for config name section. 
+process.env[N+'_test_upperCase'] = 187 + +function testPrefix(prefix) { + var config = require('../')(prefix, { + option: true + }) + + console.log('\n\n------ nested-env-vars ------\n',{prefix: prefix}, '\n', config); + + assert.equal(config.option, true) + assert.equal(config.someOpt.a, 42) + assert.equal(config.someOpt.x, 99) + // Should not override `a` once it's been set + assert.equal(config.someOpt.a/*.b*/, 42) + // Should not override `x` once it's been set + assert.equal(config.someOpt.x/*.y*/, 99) + assert.equal(config.someOpt.z, 186577) + // Should not override `z` once it's been set + assert.equal(config.someOpt.z/*.x*/, 186577) + assert.equal(config.someOpt.w.w, 18629) + assert.equal(config.z.i, 9999) + + assert.equal(config.test_upperCase, 187) +} + +testPrefix(n); +testPrefix(N); diff --git a/node_modules/rc/test/test.js b/node_modules/rc/test/test.js new file mode 100644 index 00000000..4f633518 --- /dev/null +++ b/node_modules/rc/test/test.js @@ -0,0 +1,59 @@ + +var n = 'rc'+Math.random() +var assert = require('assert') + +process.env[n+'_envOption'] = 42 + +var config = require('../')(n, { + option: true +}) + +console.log(config) + +assert.equal(config.option, true) +assert.equal(config.envOption, 42) + +var customArgv = require('../')(n, { + option: true +}, { // nopt-like argv + option: false, + envOption: 24, + argv: { + remain: [], + cooked: ['--no-option', '--envOption', '24'], + original: ['--no-option', '--envOption=24'] + } +}) + +console.log(customArgv) + +assert.equal(customArgv.option, false) +assert.equal(customArgv.envOption, 24) + +var fs = require('fs') +var path = require('path') +var jsonrc = path.resolve('.' + n + 'rc'); + +fs.writeFileSync(jsonrc, [ + '{', + '// json overrides default', + '"option": false,', + '/* env overrides json */', + '"envOption": 24', + '}' +].join('\n')); + +var commentedJSON = require('../')(n, { + option: true +}) + +fs.unlinkSync(jsonrc); + +console.log(commentedJSON) + +assert.equal(commentedJSON.option, false) +assert.equal(commentedJSON.envOption, 42) + +assert.equal(commentedJSON.config, jsonrc) +assert.equal(commentedJSON.configs.length, 1) +assert.equal(commentedJSON.configs[0], jsonrc) diff --git a/node_modules/readable-stream/.travis.yml b/node_modules/readable-stream/.travis.yml new file mode 100644 index 00000000..40992555 --- /dev/null +++ b/node_modules/readable-stream/.travis.yml @@ -0,0 +1,55 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test +script: "npm run $TASK" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: 
g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 00000000..f478d58d --- /dev/null +++ b/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/readable-stream/GOVERNANCE.md b/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 00000000..16ffb93f --- /dev/null +++ b/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. 
All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. 
+ +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/readable-stream/LICENSE b/node_modules/readable-stream/LICENSE new file mode 100644 index 00000000..2873b3b2 --- /dev/null +++ b/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" diff --git a/node_modules/readable-stream/README.md b/node_modules/readable-stream/README.md new file mode 100644 index 00000000..23fe3f3e --- /dev/null +++ b/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 00000000..83275f19 --- /dev/null +++ b/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/readable-stream/duplex-browser.js b/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 00000000..f8b2db83 --- /dev/null +++ b/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/readable-stream/duplex.js b/node_modules/readable-stream/duplex.js new file mode 100644 index 00000000..46924cbf --- /dev/null +++ b/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 00000000..a1ca813e --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
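As a hedged illustration of the comment above (not taken from the upstream sources), a Duplex simply exposes an independent readable side and writable side; with this implementation both `read` and `write` can be supplied through the options object.

```js
// Illustrative only: a toy Duplex whose two sides are unrelated, roughly the
// shape of a socket-like object (the read side produces data, the write side
// consumes it elsewhere).
const { Duplex } = require('readable-stream');

const toy = new Duplex({
  read() {
    // readable side: emit one chunk, then end
    this.push('from the readable side\n');
    this.push(null);
  },
  write(chunk, encoding, callback) {
    // writable side: just report what was written
    console.log('writable side received:', chunk.toString());
    callback();
  }
});

toy.pipe(process.stdout);            // consume the readable side
toy.write('into the writable side\n');
toy.end();
```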
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 00000000..a9c83588 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 00000000..bf34ac65 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
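The PassThrough class defined above is the identity Transform: every written chunk is emitted unchanged. A short usage sketch (editorial, not part of the vendored files) shows why that is useful as a tap point in a pipeline:

```js
// Illustrative only: PassThrough forwards chunks as-is, so it can be spliced
// into a pipeline to observe traffic without altering it.
const { PassThrough } = require('readable-stream');

const tap = new PassThrough();
tap.on('data', function (chunk) {
  console.error('observed %d bytes', chunk.length);
});

process.stdin.pipe(tap).pipe(process.stdout);
```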
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
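A small sketch (illustrative only, not from the upstream documentation) of the flow-control switches implemented here: attaching a `'data'` listener starts flowing mode, `pause()` stops it, and `resume()` restarts it on a later tick.

```js
// Illustrative only: consume a stream in flowing mode, pausing briefly after
// each chunk. The hypothetical source ends itself after three chunks.
const { Readable } = require('readable-stream');

let n = 0;
const r = new Readable({
  read() {
    this.push(++n <= 3 ? 'tick ' + n + '\n' : null);
  }
});

r.on('data', function (chunk) {
  process.stdout.write(chunk);
  r.pause();                       // stop flowing while "work" happens
  setTimeout(function () {
    r.resume();                    // flow again shortly afterwards
  }, 100);
});
```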
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 00000000..5d1f8b87 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. 
Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
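A minimal sketch of the contract described in the comment above (editorial illustration, not part of the vendored file): transform each written chunk, `push()` any output, then call `cb()`. The `transform` and `flush` options are installed as `_transform` and `_flush` by the constructor shown earlier.

```js
// Illustrative only: a Transform that upper-cases its input and appends a
// trailer once the writable side ends.
const { Transform } = require('readable-stream');

const upper = new Transform({
  transform(chunk, encoding, cb) {
    this.push(chunk.toString().toUpperCase()); // zero or more pushes per chunk
    cb();                                      // this chunk is fully handled
  },
  flush(cb) {
    this.push('\n-- done --\n');
    cb();
  }
});

process.stdin.pipe(upper).pipe(process.stdout);
```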
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 00000000..b3f4e85a --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,687 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
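To illustrate the comment above (a hypothetical sketch, not part of the vendored file): supplying a `write()` implementation is enough, and the machinery below takes care of buffering, highWaterMark accounting and the `'drain'` event.

```js
// Illustrative only: a Writable sink that counts bytes and reports the total
// once 'finish' fires (i.e. after end() and all pending writes complete).
const { Writable } = require('readable-stream');

let total = 0;
const sink = new Writable({
  write(chunk, encoding, callback) {
    total += chunk.length;
    callback();                 // must be called exactly once per chunk
  }
});

sink.on('finish', function () {
  console.log('wrote %d bytes', total);
});

sink.write('some data ');
sink.end('and a final chunk');
```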
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. 
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 00000000..aefc68bd --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,79 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = 
this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 00000000..5a0a0d88 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,74 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 00000000..9332a3fd --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/readable-stream/lib/internal/streams/stream.js 
b/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 00000000..ce2ad5b6 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json new file mode 100644 index 00000000..dbb1da6b --- /dev/null +++ b/node_modules/readable-stream/package.json @@ -0,0 +1,52 @@ +{ + "name": "readable-stream", + "version": "2.3.6", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "scripts": { + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/node_modules/readable-stream/passthrough.js b/node_modules/readable-stream/passthrough.js new file mode 100644 index 00000000..ffd791d7 --- /dev/null +++ b/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git a/node_modules/readable-stream/readable-browser.js b/node_modules/readable-stream/readable-browser.js new file mode 100644 index 00000000..e5037259 --- /dev/null +++ b/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/readable-stream/readable.js b/node_modules/readable-stream/readable.js new file mode 100644 index 00000000..ec89ec53 --- /dev/null +++ b/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = 
require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/readable-stream/transform.js b/node_modules/readable-stream/transform.js new file mode 100644 index 00000000..b1baba26 --- /dev/null +++ b/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/node_modules/readable-stream/writable-browser.js b/node_modules/readable-stream/writable-browser.js new file mode 100644 index 00000000..ebdde6a8 --- /dev/null +++ b/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/readable-stream/writable.js b/node_modules/readable-stream/writable.js new file mode 100644 index 00000000..3211a6f8 --- /dev/null +++ b/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/node_modules/readdirp/LICENSE b/node_modules/readdirp/LICENSE new file mode 100644 index 00000000..8a63b80b --- /dev/null +++ b/node_modules/readdirp/LICENSE @@ -0,0 +1,20 @@ +This software is released under the MIT license: + +Copyright (c) 2012-2015 Thorsten Lorenz + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/readdirp/README.md b/node_modules/readdirp/README.md new file mode 100644 index 00000000..431f4025 --- /dev/null +++ b/node_modules/readdirp/README.md @@ -0,0 +1,204 @@ +# readdirp [![Build Status](https://secure.travis-ci.org/thlorenz/readdirp.svg)](http://travis-ci.org/thlorenz/readdirp) + +[![NPM](https://nodei.co/npm/readdirp.png?downloads=true&stars=true)](https://nodei.co/npm/readdirp/) + +Recursive version of [fs.readdir](http://nodejs.org/docs/latest/api/fs.html#fs_fs_readdir_path_callback). Exposes a **stream api**. 
+ +```javascript +var readdirp = require('readdirp') + , path = require('path') + , es = require('event-stream'); + +// print out all JavaScript files along with their size + +var stream = readdirp({ root: path.join(__dirname), fileFilter: '*.js' }); +stream + .on('warn', function (err) { + console.error('non-fatal error', err); + // optionally call stream.destroy() here in order to abort and cause 'close' to be emitted + }) + .on('error', function (err) { console.error('fatal error', err); }) + .pipe(es.mapSync(function (entry) { + return { path: entry.path, size: entry.stat.size }; + })) + .pipe(es.stringify()) + .pipe(process.stdout); +``` + +Meant to be one of the recursive versions of [fs](http://nodejs.org/docs/latest/api/fs.html) functions, e.g., like [mkdirp](https://github.com/substack/node-mkdirp). + +**Table of Contents** *generated with [DocToc](http://doctoc.herokuapp.com/)* + +- [Installation](#installation) +- [API](#api) + - [entry stream](#entry-stream) + - [options](#options) + - [entry info](#entry-info) + - [Filters](#filters) + - [Callback API](#callback-api) + - [allProcessed ](#allprocessed) + - [fileProcessed](#fileprocessed) +- [More Examples](#more-examples) + - [stream api](#stream-api) + - [stream api pipe](#stream-api-pipe) + - [grep](#grep) + - [using callback api](#using-callback-api) + - [tests](#tests) + + +# Installation + + npm install readdirp + +# API + +***var entryStream = readdirp (options)*** + +Reads given root recursively and returns a `stream` of [entry info](#entry-info)s. + +## entry stream + +Behaves as follows: + +- `emit('data')` passes an [entry info](#entry-info) whenever one is found +- `emit('warn')` passes a non-fatal `Error` that prevents a file/directory from being processed (i.e., if it is + inaccessible to the user) +- `emit('error')` passes a fatal `Error` which also ends the stream (i.e., when illegal options where passed) +- `emit('end')` called when all entries were found and no more will be emitted (i.e., we are done) +- `emit('close')` called when the stream is destroyed via `stream.destroy()` (which could be useful if you want to + manually abort even on a non fatal error) - at that point the stream is no longer `readable` and no more entries, + warning or errors are emitted +- to learn more about streams, consult the very detailed + [nodejs streams documentation](http://nodejs.org/api/stream.html) or the + [stream-handbook](https://github.com/substack/stream-handbook) + + +## options + +- **root**: path in which to start reading and recursing into subdirectories + +- **fileFilter**: filter to include/exclude files found (see [Filters](#filters) for more) + +- **directoryFilter**: filter to include/exclude directories found and to recurse into (see [Filters](#filters) for more) + +- **depth**: depth at which to stop recursing even if more subdirectories are found + +- **entryType**: determines if data events on the stream should be emitted for `'files'`, `'directories'`, `'both'`, or `'all'`. Setting to `'all'` will also include entries for other types of file descriptors like character devices, unix sockets and named pipes. Defaults to `'files'`. + +- **lstat**: if `true`, readdirp uses `fs.lstat` instead of `fs.stat` in order to stat files and includes symlink entries in the stream along with files. 
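+
+For example, the options above can be combined as follows. This is a minimal sketch; the `root` path, filter patterns and `depth` value are placeholders for illustration only and are not part of the original readme:
+
+```js
+var readdirp = require('readdirp');
+var path = require('path');
+
+// Hypothetical option values, chosen only to illustrate the settings described above.
+readdirp({
+  root: path.join(__dirname, 'src'),            // start directory
+  fileFilter: ['*.js', '*.json'],               // include only these files
+  directoryFilter: ['!.git', '!node_modules'],  // do not recurse into these directories
+  depth: 2,                                     // stop recursing two levels down
+  entryType: 'files',                           // emit data events for files only
+  lstat: false                                  // use fs.stat (follow symlinks)
+})
+  .on('warn', function (err) { console.error('non-fatal error', err); })
+  .on('error', function (err) { console.error('fatal error', err); })
+  .on('data', function (entry) { console.log(entry.path); });
+```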
+ +## entry info + +Has the following properties: + +- **parentDir** : directory in which entry was found (relative to given root) +- **fullParentDir** : full path to parent directory +- **name** : name of the file/directory +- **path** : path to the file/directory (relative to given root) +- **fullPath** : full path to the file/directory found +- **stat** : built in [stat object](http://nodejs.org/docs/v0.4.9/api/fs.html#fs.Stats) +- **Example**: (assuming root was `/User/dev/readdirp`) + + parentDir : 'test/bed/root_dir1', + fullParentDir : '/User/dev/readdirp/test/bed/root_dir1', + name : 'root_dir1_subdir1', + path : 'test/bed/root_dir1/root_dir1_subdir1', + fullPath : '/User/dev/readdirp/test/bed/root_dir1/root_dir1_subdir1', + stat : [ ... ] + +## Filters + +There are three different ways to specify filters for files and directories respectively. + +- **function**: a function that takes an entry info as a parameter and returns true to include or false to exclude the entry + +- **glob string**: a string (e.g., `*.js`) which is matched using [minimatch](https://github.com/isaacs/minimatch), so go there for more + information. + + Globstars (`**`) are not supported since specifying a recursive pattern for an already recursive function doesn't make sense. + + Negated globs (as explained in the minimatch documentation) are allowed, e.g., `!*.txt` matches everything but text files. + +- **array of glob strings**: either need to be all inclusive or all exclusive (negated) patterns otherwise an error is thrown. + + `[ '*.json', '*.js' ]` includes all JavaScript and Json files. + + + `[ '!.git', '!node_modules' ]` includes all directories except the '.git' and 'node_modules'. + +Directories that do not pass a filter will not be recursed into. + +## Callback API + +Although the stream api is recommended, readdirp also exposes a callback based api. + +***readdirp (options, callback1 [, callback2])*** + +If callback2 is given, callback1 functions as the **fileProcessed** callback, and callback2 as the **allProcessed** callback. + +If only callback1 is given, it functions as the **allProcessed** callback. + +### allProcessed + +- function with err and res parameters, e.g., `function (err, res) { ... }` +- **err**: array of errors that occurred during the operation, **res may still be present, even if errors occurred** +- **res**: collection of file/directory [entry infos](#entry-info) + +### fileProcessed + +- function with [entry info](#entry-info) parameter e.g., `function (entryInfo) { ... 
}` + + +# More Examples + +`on('error', ..)`, `on('warn', ..)` and `on('end', ..)` handling omitted for brevity + +```javascript +var readdirp = require('readdirp'); + +// Glob file filter +readdirp({ root: './test/bed', fileFilter: '*.js' }) + .on('data', function (entry) { + // do something with each JavaScript file entry + }); + +// Combined glob file filters +readdirp({ root: './test/bed', fileFilter: [ '*.js', '*.json' ] }) + .on('data', function (entry) { + // do something with each JavaScript and Json file entry + }); + +// Combined negated directory filters +readdirp({ root: './test/bed', directoryFilter: [ '!.git', '!*modules' ] }) + .on('data', function (entry) { + // do something with each file entry found outside '.git' or any modules directory + }); + +// Function directory filter +readdirp({ root: './test/bed', directoryFilter: function (di) { return di.name.length === 9; } }) + .on('data', function (entry) { + // do something with each file entry found inside directories whose name has length 9 + }); + +// Limiting depth +readdirp({ root: './test/bed', depth: 1 }) + .on('data', function (entry) { + // do something with each file entry found up to 1 subdirectory deep + }); + +// callback api +readdirp({ root: '.' }, function(fileInfo) { + // do something with file entry here + }, function (err, res) { + // all done, move on or do final step for all file entries here +}); +``` + +Try more examples by following [instructions](https://github.com/paulmillr/readdirp/blob/master/examples/Readme.md) +on how to get going. + +## tests + +The [readdirp tests](https://github.com/paulmillr/readdirp/blob/master/test/readdirp.js) also will give you a good idea on +how things work. + diff --git a/node_modules/readdirp/package.json b/node_modules/readdirp/package.json new file mode 100644 index 00000000..39314a29 --- /dev/null +++ b/node_modules/readdirp/package.json @@ -0,0 +1,50 @@ +{ + "author": "Thorsten Lorenz (thlorenz.com)", + "name": "readdirp", + "description": "Recursive version of fs.readdir with streaming api.", + "version": "2.2.1", + "homepage": "https://github.com/paulmillr/readdirp", + "repository": { + "type": "git", + "url": "git://github.com/paulmillr/readdirp.git" + }, + "engines": { + "node": ">=0.10" + }, + "files": [ + "readdirp.js", + "stream-api.js" + ], + "keywords": [ + "recursive", + "fs", + "stream", + "streams", + "readdir", + "filesystem", + "find", + "filter" + ], + "main": "readdirp.js", + "scripts": { + "test-main": "(cd test && set -e; for t in ./*.js; do node $t; done)", + "test-0.10": "nave use 0.10 npm run test-main", + "test-0.12": "nave use 0.12 npm run test-main", + "test-4": "nave use 4.4 npm run test-main", + "test-6": "nave use 6.2 npm run test-main", + "test-all": "npm run test-main && npm run test-0.10 && npm run test-0.12 && npm run test-4 && npm run test-6", + "test": "npm run test-main" + }, + "dependencies": { + "graceful-fs": "^4.1.11", + "micromatch": "^3.1.10", + "readable-stream": "^2.0.2" + }, + "devDependencies": { + "nave": "^0.5.1", + "proxyquire": "^1.7.9", + "tap": "1.3.2", + "through2": "^2.0.0" + }, + "license": "MIT" +} diff --git a/node_modules/readdirp/readdirp.js b/node_modules/readdirp/readdirp.js new file mode 100644 index 00000000..863bd17b --- /dev/null +++ b/node_modules/readdirp/readdirp.js @@ -0,0 +1,294 @@ +'use strict'; + +var fs = require('graceful-fs') + , path = require('path') + , micromatch = require('micromatch').isMatch + , toString = Object.prototype.toString + ; + + +// Standard helpers +function isFunction 
(obj) { + return toString.call(obj) === '[object Function]'; +} + +function isString (obj) { + return toString.call(obj) === '[object String]'; +} + +function isUndefined (obj) { + return obj === void 0; +} + +/** + * Main function which ends up calling readdirRec and reads all files and directories in given root recursively. + * @param { Object } opts Options to specify root (start directory), filters and recursion depth + * @param { function } callback1 When callback2 is given calls back for each processed file - function (fileInfo) { ... }, + * when callback2 is not given, it behaves like explained in callback2 + * @param { function } callback2 Calls back once all files have been processed with an array of errors and file infos + * function (err, fileInfos) { ... } + */ +function readdir(opts, callback1, callback2) { + var stream + , handleError + , handleFatalError + , errors = [] + , readdirResult = { + directories: [] + , files: [] + } + , fileProcessed + , allProcessed + , realRoot + , aborted = false + , paused = false + ; + + // If no callbacks were given we will use a streaming interface + if (isUndefined(callback1)) { + var api = require('./stream-api')(); + stream = api.stream; + callback1 = api.processEntry; + callback2 = api.done; + handleError = api.handleError; + handleFatalError = api.handleFatalError; + + stream.on('close', function () { aborted = true; }); + stream.on('pause', function () { paused = true; }); + stream.on('resume', function () { paused = false; }); + } else { + handleError = function (err) { errors.push(err); }; + handleFatalError = function (err) { + handleError(err); + allProcessed(errors, null); + }; + } + + if (isUndefined(opts)){ + handleFatalError(new Error ( + 'Need to pass at least one argument: opts! \n' + + 'https://github.com/paulmillr/readdirp#options' + ) + ); + return stream; + } + + opts.root = opts.root || '.'; + opts.fileFilter = opts.fileFilter || function() { return true; }; + opts.directoryFilter = opts.directoryFilter || function() { return true; }; + opts.depth = typeof opts.depth === 'undefined' ? 999999999 : opts.depth; + opts.entryType = opts.entryType || 'files'; + + var statfn = opts.lstat === true ? fs.lstat.bind(fs) : fs.stat.bind(fs); + + if (isUndefined(callback2)) { + fileProcessed = function() { }; + allProcessed = callback1; + } else { + fileProcessed = callback1; + allProcessed = callback2; + } + + function normalizeFilter (filter) { + + if (isUndefined(filter)) return undefined; + + function isNegated (filters) { + + function negated(f) { + return f.indexOf('!') === 0; + } + + var some = filters.some(negated); + if (!some) { + return false; + } else { + if (filters.every(negated)) { + return true; + } else { + // if we detect illegal filters, bail out immediately + throw new Error( + 'Cannot mix negated with non negated glob filters: ' + filters + '\n' + + 'https://github.com/paulmillr/readdirp#filters' + ); + } + } + } + + // Turn all filters into a function + if (isFunction(filter)) { + + return filter; + + } else if (isString(filter)) { + + return function (entryInfo) { + return micromatch(entryInfo.name, filter.trim()); + }; + + } else if (filter && Array.isArray(filter)) { + + if (filter) filter = filter.map(function (f) { + return f.trim(); + }); + + return isNegated(filter) ? 
+ // use AND to concat multiple negated filters + function (entryInfo) { + return filter.every(function (f) { + return micromatch(entryInfo.name, f); + }); + } + : + // use OR to concat multiple inclusive filters + function (entryInfo) { + return filter.some(function (f) { + return micromatch(entryInfo.name, f); + }); + }; + } + } + + function processDir(currentDir, entries, callProcessed) { + if (aborted) return; + var total = entries.length + , processed = 0 + , entryInfos = [] + ; + + fs.realpath(currentDir, function(err, realCurrentDir) { + if (aborted) return; + if (err) { + handleError(err); + callProcessed(entryInfos); + return; + } + + var relDir = path.relative(realRoot, realCurrentDir); + + if (entries.length === 0) { + callProcessed([]); + } else { + entries.forEach(function (entry) { + + var fullPath = path.join(realCurrentDir, entry) + , relPath = path.join(relDir, entry); + + statfn(fullPath, function (err, stat) { + if (err) { + handleError(err); + } else { + entryInfos.push({ + name : entry + , path : relPath // relative to root + , fullPath : fullPath + + , parentDir : relDir // relative to root + , fullParentDir : realCurrentDir + + , stat : stat + }); + } + processed++; + if (processed === total) callProcessed(entryInfos); + }); + }); + } + }); + } + + function readdirRec(currentDir, depth, callCurrentDirProcessed) { + var args = arguments; + if (aborted) return; + if (paused) { + setImmediate(function () { + readdirRec.apply(null, args); + }) + return; + } + + fs.readdir(currentDir, function (err, entries) { + if (err) { + handleError(err); + callCurrentDirProcessed(); + return; + } + + processDir(currentDir, entries, function(entryInfos) { + + var subdirs = entryInfos + .filter(function (ei) { return ei.stat.isDirectory() && opts.directoryFilter(ei); }); + + subdirs.forEach(function (di) { + if(opts.entryType === 'directories' || opts.entryType === 'both' || opts.entryType === 'all') { + fileProcessed(di); + } + readdirResult.directories.push(di); + }); + + entryInfos + .filter(function(ei) { + var isCorrectType = opts.entryType === 'all' ? 
+ !ei.stat.isDirectory() : ei.stat.isFile() || ei.stat.isSymbolicLink(); + return isCorrectType && opts.fileFilter(ei); + }) + .forEach(function (fi) { + if(opts.entryType === 'files' || opts.entryType === 'both' || opts.entryType === 'all') { + fileProcessed(fi); + } + readdirResult.files.push(fi); + }); + + var pendingSubdirs = subdirs.length; + + // Be done if no more subfolders exist or we reached the maximum desired depth + if(pendingSubdirs === 0 || depth === opts.depth) { + callCurrentDirProcessed(); + } else { + // recurse into subdirs, keeping track of which ones are done + // and call back once all are processed + subdirs.forEach(function (subdir) { + readdirRec(subdir.fullPath, depth + 1, function () { + pendingSubdirs = pendingSubdirs - 1; + if(pendingSubdirs === 0) { + callCurrentDirProcessed(); + } + }); + }); + } + }); + }); + } + + // Validate and normalize filters + try { + opts.fileFilter = normalizeFilter(opts.fileFilter); + opts.directoryFilter = normalizeFilter(opts.directoryFilter); + } catch (err) { + // if we detect illegal filters, bail out immediately + handleFatalError(err); + return stream; + } + + // If filters were valid get on with the show + fs.realpath(opts.root, function(err, res) { + if (err) { + handleFatalError(err); + return stream; + } + + realRoot = res; + readdirRec(opts.root, 0, function () { + // All errors are collected into the errors array + if (errors.length > 0) { + allProcessed(errors, readdirResult); + } else { + allProcessed(null, readdirResult); + } + }); + }); + + return stream; +} + +module.exports = readdir; diff --git a/node_modules/readdirp/stream-api.js b/node_modules/readdirp/stream-api.js new file mode 100644 index 00000000..bffd1a97 --- /dev/null +++ b/node_modules/readdirp/stream-api.js @@ -0,0 +1,98 @@ +'use strict'; + +var stream = require('readable-stream'); +var util = require('util'); + +var Readable = stream.Readable; + +module.exports = ReaddirpReadable; + +util.inherits(ReaddirpReadable, Readable); + +function ReaddirpReadable (opts) { + if (!(this instanceof ReaddirpReadable)) return new ReaddirpReadable(opts); + + opts = opts || {}; + + opts.objectMode = true; + Readable.call(this, opts); + + // backpressure not implemented at this point + this.highWaterMark = Infinity; + + this._destroyed = false; + this._paused = false; + this._warnings = []; + this._errors = []; + + this._pauseResumeErrors(); +} + +var proto = ReaddirpReadable.prototype; + +proto._pauseResumeErrors = function () { + var self = this; + self.on('pause', function () { self._paused = true }); + self.on('resume', function () { + if (self._destroyed) return; + self._paused = false; + + self._warnings.forEach(function (err) { self.emit('warn', err) }); + self._warnings.length = 0; + + self._errors.forEach(function (err) { self.emit('error', err) }); + self._errors.length = 0; + }) +} + +// called for each entry +proto._processEntry = function (entry) { + if (this._destroyed) return; + this.push(entry); +} + +proto._read = function () { } + +proto.destroy = function () { + // when stream is destroyed it will emit nothing further, not even errors or warnings + this.push(null); + this.readable = false; + this._destroyed = true; + this.emit('close'); +} + +proto._done = function () { + this.push(null); +} + +// we emit errors and warnings async since we may handle errors like invalid args +// within the initial event loop before any event listeners subscribed +proto._handleError = function (err) { + var self = this; + setImmediate(function () { + if 
(self._paused) return self._warnings.push(err); + if (!self._destroyed) self.emit('warn', err); + }); +} + +proto._handleFatalError = function (err) { + var self = this; + setImmediate(function () { + if (self._paused) return self._errors.push(err); + if (!self._destroyed) self.emit('error', err); + }); +} + +function createStreamAPI () { + var stream = new ReaddirpReadable(); + + return { + stream : stream + , processEntry : stream._processEntry.bind(stream) + , done : stream._done.bind(stream) + , handleError : stream._handleError.bind(stream) + , handleFatalError : stream._handleFatalError.bind(stream) + }; +} + +module.exports = createStreamAPI; diff --git a/node_modules/regex-not/LICENSE b/node_modules/regex-not/LICENSE new file mode 100644 index 00000000..8ee09d91 --- /dev/null +++ b/node_modules/regex-not/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016, 2018, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/regex-not/README.md b/node_modules/regex-not/README.md new file mode 100644 index 00000000..24d00e7d --- /dev/null +++ b/node_modules/regex-not/README.md @@ -0,0 +1,133 @@ +# regex-not [![NPM version](https://img.shields.io/npm/v/regex-not.svg?style=flat)](https://www.npmjs.com/package/regex-not) [![NPM monthly downloads](https://img.shields.io/npm/dm/regex-not.svg?style=flat)](https://npmjs.org/package/regex-not) [![NPM total downloads](https://img.shields.io/npm/dt/regex-not.svg?style=flat)](https://npmjs.org/package/regex-not) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/regex-not.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/regex-not) + +> Create a javascript regular expression for matching everything except for the given string. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save regex-not +``` + +## Usage + +```js +var not = require('regex-not'); +``` + +The main export is a function that takes a string an options object. 
+
+```js
+not(string[, options]);
+```
+
+**Example**
+
+```js
+var not = require('regex-not');
+console.log(not('foo'));
+//=> /^(?:(?!^(?:foo)$).)+$/
+```
+
+**Strict matching**
+
+By default, the returned regex is for strictly (not) matching the exact given pattern (in other words, "match this string if it does NOT _exactly equal_ `foo`"):
+
+```js
+var re = not('foo');
+console.log(re.test('foo')); //=> false
+console.log(re.test('bar')); //=> true
+console.log(re.test('foobar')); //=> true
+console.log(re.test('barfoo')); //=> true
+```
+
+### .create
+
+Returns a string to allow you to create your own regex:
+
+```js
+console.log(not.create('foo'));
+//=> '(?:(?!^(?:foo)$).)+'
+```
+
+### Options
+
+**options.contains**
+
+You can relax strict matching by setting `options.contains` to true (in other words, "match this string if it does NOT _contain_ `foo`"). The option is passed to `not()` itself, since `RegExp.prototype.test` only takes the string to test:
+
+```js
+var re = not('foo', {contains: true});
+console.log(re.test('foo')); //=> false
+console.log(re.test('bar')); //=> true
+console.log(re.test('foobar')); //=> false
+console.log(re.test('barfoo')); //=> false
+```
+
+## About
+
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+ +### Related projects + +You might also be interested in these projects: + +* [regex-cache](https://www.npmjs.com/package/regex-cache): Memoize the results of a call to the RegExp constructor, avoiding repetitious runtime compilation of… [more](https://github.com/jonschlinkert/regex-cache) | [homepage](https://github.com/jonschlinkert/regex-cache "Memoize the results of a call to the RegExp constructor, avoiding repetitious runtime compilation of the same string and options, resulting in surprising performance improvements.") +* [to-regex](https://www.npmjs.com/package/to-regex): Generate a regex from a string or array of strings. | [homepage](https://github.com/jonschlinkert/to-regex "Generate a regex from a string or array of strings.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 9 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [doowb](https://github.com/doowb) | +| 1 | [EdwardBetts](https://github.com/EdwardBetts) | + +### Author + +**Jon Schlinkert** + +* [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on February 19, 2018._ \ No newline at end of file diff --git a/node_modules/regex-not/index.js b/node_modules/regex-not/index.js new file mode 100644 index 00000000..02bfed4a --- /dev/null +++ b/node_modules/regex-not/index.js @@ -0,0 +1,72 @@ +'use strict'; + +var extend = require('extend-shallow'); +var safe = require('safe-regex'); + +/** + * The main export is a function that takes a `pattern` string and an `options` object. + * + * ```js + & var not = require('regex-not'); + & console.log(not('foo')); + & //=> /^(?:(?!^(?:foo)$).)*$/ + * ``` + * + * @param {String} `pattern` + * @param {Object} `options` + * @return {RegExp} Converts the given `pattern` to a regex using the specified `options`. + * @api public + */ + +function toRegex(pattern, options) { + return new RegExp(toRegex.create(pattern, options)); +} + +/** + * Create a regex-compatible string from the given `pattern` and `options`. + * + * ```js + & var not = require('regex-not'); + & console.log(not.create('foo')); + & //=> '^(?:(?!^(?:foo)$).)*$' + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {String} + * @api public + */ + +toRegex.create = function(pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('expected a string'); + } + + var opts = extend({}, options); + if (opts.contains === true) { + opts.strictNegate = false; + } + + var open = opts.strictOpen !== false ? '^' : ''; + var close = opts.strictClose !== false ? '$' : ''; + var endChar = opts.endChar ? 
opts.endChar : '+'; + var str = pattern; + + if (opts.strictNegate === false) { + str = '(?:(?!(?:' + pattern + ')).)' + endChar; + } else { + str = '(?:(?!^(?:' + pattern + ')$).)' + endChar; + } + + var res = open + str + close; + if (opts.safe === true && safe(res) === false) { + throw new Error('potentially unsafe regular expression: ' + res); + } + + return res; +}; + +/** + * Expose `toRegex` + */ + +module.exports = toRegex; diff --git a/node_modules/regex-not/node_modules/extend-shallow/LICENSE b/node_modules/regex-not/node_modules/extend-shallow/LICENSE new file mode 100644 index 00000000..99c93691 --- /dev/null +++ b/node_modules/regex-not/node_modules/extend-shallow/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, 2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/regex-not/node_modules/extend-shallow/README.md b/node_modules/regex-not/node_modules/extend-shallow/README.md new file mode 100644 index 00000000..dee226f4 --- /dev/null +++ b/node_modules/regex-not/node_modules/extend-shallow/README.md @@ -0,0 +1,97 @@ +# extend-shallow [![NPM version](https://img.shields.io/npm/v/extend-shallow.svg?style=flat)](https://www.npmjs.com/package/extend-shallow) [![NPM monthly downloads](https://img.shields.io/npm/dm/extend-shallow.svg?style=flat)](https://npmjs.org/package/extend-shallow) [![NPM total downloads](https://img.shields.io/npm/dt/extend-shallow.svg?style=flat)](https://npmjs.org/package/extend-shallow) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/extend-shallow.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/extend-shallow) + +> Extend an object with the properties of additional objects. node.js/javascript util. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save extend-shallow +``` + +## Usage + +```js +var extend = require('extend-shallow'); + +extend({a: 'b'}, {c: 'd'}) +//=> {a: 'b', c: 'd'} +``` + +Pass an empty object to shallow clone: + +```js +var obj = {}; +extend(obj, {a: 'b'}, {c: 'd'}) +//=> {a: 'b', c: 'd'} +``` + +## About + +
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+ +### Related projects + +You might also be interested in these projects: + +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow "Extend an object with the properties of additional objects. node.js/javascript util.") +* [for-in](https://www.npmjs.com/package/for-in): Iterate over the own and inherited enumerable properties of an object, and return an object… [more](https://github.com/jonschlinkert/for-in) | [homepage](https://github.com/jonschlinkert/for-in "Iterate over the own and inherited enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js") +* [for-own](https://www.npmjs.com/package/for-own): Iterate over the own enumerable properties of an object, and return an object with properties… [more](https://github.com/jonschlinkert/for-own) | [homepage](https://github.com/jonschlinkert/for-own "Iterate over the own enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 33 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [pdehaan](https://github.com/pdehaan) | + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). 
+ +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on November 19, 2017._ \ No newline at end of file diff --git a/node_modules/regex-not/node_modules/extend-shallow/index.js b/node_modules/regex-not/node_modules/extend-shallow/index.js new file mode 100644 index 00000000..c9582f8f --- /dev/null +++ b/node_modules/regex-not/node_modules/extend-shallow/index.js @@ -0,0 +1,60 @@ +'use strict'; + +var isExtendable = require('is-extendable'); +var assignSymbols = require('assign-symbols'); + +module.exports = Object.assign || function(obj/*, objects*/) { + if (obj === null || typeof obj === 'undefined') { + throw new TypeError('Cannot convert undefined or null to object'); + } + if (!isObject(obj)) { + obj = {}; + } + for (var i = 1; i < arguments.length; i++) { + var val = arguments[i]; + if (isString(val)) { + val = toObject(val); + } + if (isObject(val)) { + assign(obj, val); + assignSymbols(obj, val); + } + } + return obj; +}; + +function assign(a, b) { + for (var key in b) { + if (hasOwn(b, key)) { + a[key] = b[key]; + } + } +} + +function isString(val) { + return (val && typeof val === 'string'); +} + +function toObject(str) { + var obj = {}; + for (var i in str) { + obj[i] = str[i]; + } + return obj; +} + +function isObject(val) { + return (val && typeof val === 'object') || isExtendable(val); +} + +/** + * Returns true if the given `key` is an own property of `obj`. + */ + +function hasOwn(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +function isEnum(obj, key) { + return Object.prototype.propertyIsEnumerable.call(obj, key); +} diff --git a/node_modules/regex-not/node_modules/extend-shallow/package.json b/node_modules/regex-not/node_modules/extend-shallow/package.json new file mode 100644 index 00000000..e5e91053 --- /dev/null +++ b/node_modules/regex-not/node_modules/extend-shallow/package.json @@ -0,0 +1,83 @@ +{ + "name": "extend-shallow", + "description": "Extend an object with the properties of additional objects. 
node.js/javascript util.", + "version": "3.0.2", + "homepage": "https://github.com/jonschlinkert/extend-shallow", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Peter deHaan (http://about.me/peterdehaan)" + ], + "repository": "jonschlinkert/extend-shallow", + "bugs": { + "url": "https://github.com/jonschlinkert/extend-shallow/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "devDependencies": { + "array-slice": "^1.0.0", + "benchmarked": "^2.0.0", + "for-own": "^1.0.0", + "gulp-format-md": "^1.0.0", + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.1", + "minimist": "^1.2.0", + "mocha": "^3.5.3", + "object-assign": "^4.1.1" + }, + "keywords": [ + "assign", + "clone", + "extend", + "merge", + "obj", + "object", + "object-assign", + "object.assign", + "prop", + "properties", + "property", + "props", + "shallow", + "util", + "utility", + "utils", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "related": { + "list": [ + "extend-shallow", + "for-in", + "for-own", + "is-plain-object", + "isobject", + "kind-of" + ] + }, + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/regex-not/node_modules/is-extendable/LICENSE b/node_modules/regex-not/node_modules/is-extendable/LICENSE new file mode 100644 index 00000000..c0d7f136 --- /dev/null +++ b/node_modules/regex-not/node_modules/is-extendable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/regex-not/node_modules/is-extendable/README.md b/node_modules/regex-not/node_modules/is-extendable/README.md new file mode 100644 index 00000000..875b56a7 --- /dev/null +++ b/node_modules/regex-not/node_modules/is-extendable/README.md @@ -0,0 +1,88 @@ +# is-extendable [![NPM version](https://img.shields.io/npm/v/is-extendable.svg?style=flat)](https://www.npmjs.com/package/is-extendable) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![NPM total downloads](https://img.shields.io/npm/dt/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-extendable.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-extendable) + +> Returns true if a value is a plain object, array or function. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-extendable +``` + +## Usage + +```js +var isExtendable = require('is-extendable'); +``` + +Returns true if the value is any of the following: + +* array +* plain object +* function + +## Notes + +All objects in JavaScript can have keys, but it's a pain to check for this, since we either need to verify that the value is not `null` or `undefined` and: + +* the value is not a primitive, or +* that the object is a plain object, function or array + +Also note that an `extendable` object is not the same as an [extensible object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/isExtensible), which is one that (in es6) is not sealed, frozen, or marked as non-extensible using `preventExtensions`. + +## Release history + +### v1.0.0 - 2017/07/20 + +**Breaking changes** + +* No longer considers date, regex or error objects to be extendable + +## About + +### Related projects + +* [assign-deep](https://www.npmjs.com/package/assign-deep): Deeply assign the enumerable properties and/or es6 Symbol properties of source objects to the target… [more](https://github.com/jonschlinkert/assign-deep) | [homepage](https://github.com/jonschlinkert/assign-deep "Deeply assign the enumerable properties and/or es6 Symbol properties of source objects to the target (first) object.") +* [is-equal-shallow](https://www.npmjs.com/package/is-equal-shallow): Does a shallow comparison of two objects, returning false if the keys or values differ. | [homepage](https://github.com/jonschlinkert/is-equal-shallow "Does a shallow comparison of two objects, returning false if the keys or values differ.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
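To make the Usage and Notes sections above concrete, here is a short illustrative sketch (not part of the vendored readme) of what `is-extendable@1.0.1` returns for common values, matching the `index.js` added below:

```js
// Sketch: values is-extendable treats as safe targets/sources for extending.
var isExtendable = require('is-extendable');

console.log(isExtendable({}));              // true  (plain object)
console.log(isExtendable([]));              // true  (array)
console.log(isExtendable(function () {}));  // true  (function)

console.log(isExtendable(new Date()));      // false (dropped in v1.0.0, see Release history)
console.log(isExtendable(/foo/));           // false
console.log(isExtendable('str'));           // false (primitive)
console.log(isExtendable(null));            // false
```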
+ +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on July 20, 2017._ \ No newline at end of file diff --git a/node_modules/regex-not/node_modules/is-extendable/index.d.ts b/node_modules/regex-not/node_modules/is-extendable/index.d.ts new file mode 100644 index 00000000..b96d5075 --- /dev/null +++ b/node_modules/regex-not/node_modules/is-extendable/index.d.ts @@ -0,0 +1,5 @@ +export = isExtendable; + +declare function isExtendable(val: any): boolean; + +declare namespace isExtendable {} diff --git a/node_modules/regex-not/node_modules/is-extendable/index.js b/node_modules/regex-not/node_modules/is-extendable/index.js new file mode 100644 index 00000000..a8b26ad0 --- /dev/null +++ b/node_modules/regex-not/node_modules/is-extendable/index.js @@ -0,0 +1,14 @@ +/*! + * is-extendable + * + * Copyright (c) 2015-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +var isPlainObject = require('is-plain-object'); + +module.exports = function isExtendable(val) { + return isPlainObject(val) || typeof val === 'function' || Array.isArray(val); +}; diff --git a/node_modules/regex-not/node_modules/is-extendable/package.json b/node_modules/regex-not/node_modules/is-extendable/package.json new file mode 100644 index 00000000..2aaab65a --- /dev/null +++ b/node_modules/regex-not/node_modules/is-extendable/package.json @@ -0,0 +1,67 @@ +{ + "name": "is-extendable", + "description": "Returns true if a value is a plain object, array or function.", + "version": "1.0.1", + "homepage": "https://github.com/jonschlinkert/is-extendable", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extendable", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extendable/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "index.d.ts" + ], + "main": "index.js", + "types": "index.d.ts", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.4.2" + }, + "keywords": [ + "array", + "assign", + "check", + "date", + "extend", + "extendable", + "extensible", + "function", + "is", + "object", + "regex", + "test" + ], + "verb": { + "related": { + "list": [ + "assign-deep", + "is-equal-shallow", + "is-plain-object", + "isobject", + "kind-of" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/regex-not/package.json b/node_modules/regex-not/package.json new file mode 100644 index 00000000..0320d532 --- /dev/null +++ b/node_modules/regex-not/package.json @@ -0,0 +1,63 @@ +{ + "name": "regex-not", + "description": "Create a javascript regular expression for matching everything except for the given string.", + "version": "1.0.2", + "homepage": "https://github.com/jonschlinkert/regex-not", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/regex-not", + "bugs": { + "url": "https://github.com/jonschlinkert/regex-not/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "extend-shallow": "^3.0.2", + "safe-regex": "^1.1.0" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "exec", + "match", + "negate", + "negation", + "not", + "regex", + "regular expression", + "test" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "regex-cache", + "to-regex" + ] + }, + "reflinks": [ + "verb", + "verb-generate-readme" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/registry-auth-token/.npmignore b/node_modules/registry-auth-token/.npmignore new file mode 100644 index 00000000..41960284 --- /dev/null +++ b/node_modules/registry-auth-token/.npmignore @@ -0,0 +1,6 @@ +.editorconfig +.eslintignore +.eslintrc +.travis.yml +npm-debug.log +coverage diff --git a/node_modules/registry-auth-token/CHANGELOG.md b/node_modules/registry-auth-token/CHANGELOG.md new file mode 100644 index 00000000..20e82e87 --- /dev/null +++ b/node_modules/registry-auth-token/CHANGELOG.md @@ -0,0 +1,112 @@ +# Change Log + +All 
notable changes will be documented in this file. + +## [3.4.0] - 2019-03-20 + +### Changes + +- Enabled legacy auth token to be read from environment variable (Martin Flodin) + +## [3.3.2] - 2018-01-26 + +### Changes + +- Support password with ENV variable tokens (Nowell Strite) + +## [3.3.1] - 2017-05-02 + +### Fixes + +- Auth legacy token is basic auth (Hutson Betts) + +## [3.3.0] - 2017-04-24 + +### Changes + +- Support legacy auth token config key (Zoltan Kochan) +- Use safe-buffer module for backwards-compatible base64 encoding/decoding (Espen Hovlandsdal) +- Change to standard.js coding style (Espen Hovlandsdal) + +## [3.2.0] - 2017-04-20 + +### Changes + +- Allow passing parsed npmrc from outside (Zoltan Kochan) + +## [3.1.2] - 2017-04-07 + +### Changes + +- Avoid infinite loop on invalid URL (Zoltan Kochan) + +## [3.1.1] - 2017-04-06 + +### Changes + +- Nerf-dart URLs even if recursive is set to false (Espen Hovlandsdal) + +## [3.1.0] - 2016-10-19 + +### Changes + +- Return the password and username for Basic authorization (Zoltan Kochan) + +## [3.0.1] - 2016-08-07 + +### Changes + +- Fix recursion bug (Lukas Eipert) +- Implement alternative base64 encoding/decoding implementation for Node 6 (Lukas Eipert) + +## [3.0.0] - 2016-08-04 + +### Added + +- Support for Basic Authentication (username/password) (Lukas Eipert) + +### Changes + +- The result format of the output changed from a simple string to an object which contains the token type + +```js + // before: returns 'tokenString' + // after: returns {token: 'tokenString', type: 'Bearer'} + getAuthToken() +``` + +## [2.1.1] - 2016-07-10 + +### Changes + +- Fix infinite loop when recursively resolving registry URLs on Windows (Espen Hovlandsdal) + +## [2.1.0] - 2016-07-07 + +### Added + +- Add feature to find configured registry URL for a scope (Espen Hovlandsdal) + +## [2.0.0] - 2016-06-17 + +### Changes + +- Fix tokens defined by reference to environment variables (Dan MacTough) + +## [1.1.1] - 2016-04-26 + +### Changes + +- Fix for registries with port number in URL (Ryan Day) + +[1.1.1]: https://github.com/rexxars/registry-auth-token/compare/a5b4fe2f5ff982110eb8a813ba1b3b3c5d851af1...v1.1.1 +[2.0.0]: https://github.com/rexxars/registry-auth-token/compare/v1.1.1...v2.0.0 +[2.1.0]: https://github.com/rexxars/registry-auth-token/compare/v2.0.0...v2.1.0 +[2.1.1]: https://github.com/rexxars/registry-auth-token/compare/v2.1.0...v2.1.1 +[3.0.0]: https://github.com/rexxars/registry-auth-token/compare/v2.1.1...v3.0.0 +[3.0.1]: https://github.com/rexxars/registry-auth-token/compare/v3.0.0...v3.0.1 +[3.1.0]: https://github.com/rexxars/registry-auth-token/compare/v3.0.1...v3.1.0 +[3.1.1]: https://github.com/rexxars/registry-auth-token/compare/v3.1.0...v3.1.1 +[3.1.2]: https://github.com/rexxars/registry-auth-token/compare/v3.1.1...v3.1.2 +[3.2.0]: https://github.com/rexxars/registry-auth-token/compare/v3.1.2...v3.2.0 +[3.3.0]: https://github.com/rexxars/registry-auth-token/compare/v3.2.0...v3.3.0 diff --git a/node_modules/registry-auth-token/LICENSE b/node_modules/registry-auth-token/LICENSE new file mode 100644 index 00000000..0de12e33 --- /dev/null +++ b/node_modules/registry-auth-token/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Espen Hovlandsdal + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, 
publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/registry-auth-token/README.md b/node_modules/registry-auth-token/README.md new file mode 100644 index 00000000..5ac9c1ab --- /dev/null +++ b/node_modules/registry-auth-token/README.md @@ -0,0 +1,65 @@ +# registry-auth-token + +[![npm version](http://img.shields.io/npm/v/registry-auth-token.svg?style=flat-square)](http://browsenpm.org/package/registry-auth-token)[![Build Status](http://img.shields.io/travis/rexxars/registry-auth-token/master.svg?style=flat-square)](https://travis-ci.org/rexxars/registry-auth-token) + +Get the auth token set for an npm registry from `.npmrc`. Also allows fetching the configured registry URL for a given npm scope. + +## Installing + +``` +npm install --save registry-auth-token +``` + +## Usage + +Returns an object containing `token` and `type`, or `undefined` if no token can be found. `type` can be either `Bearer` or `Basic`. + +```js +var getAuthToken = require('registry-auth-token') +var getRegistryUrl = require('registry-auth-token/registry-url') + +// Get auth token and type for default `registry` set in `.npmrc` +console.log(getAuthToken()) // {token: 'someToken', type: 'Bearer'} + +// Get auth token for a specific registry URL +console.log(getAuthToken('//registry.foo.bar')) + +// Find the registry auth token for a given URL (with deep path): +// If registry is at `//some.host/registry` +// URL passed is `//some.host/registry/deep/path` +// Will find token the closest matching path; `//some.host/registry` +console.log(getAuthToken('//some.host/registry/deep/path', {recursive: true})) + +// Find the configured registry url for scope `@foobar`. +// Falls back to the global registry if not defined. +console.log(getRegistryUrl('@foobar')) + +// Use the npm config that is passed in +console.log(getRegistryUrl('http://registry.foobar.eu/', { + npmrc: { + 'registry': 'http://registry.foobar.eu/', + '//registry.foobar.eu/:_authToken': 'qar' + } +})) +``` + +## Return value + +```js +// If auth info can be found: +{token: 'someToken', type: 'Bearer'} + +// Or: +{token: 'someOtherToken', type: 'Basic'} + +// Or, if nothing is found: +undefined +``` + +## Security + +Please be careful when using this. Leaking your auth token is dangerous. + +## License + +MIT-licensed. See LICENSE. 
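The readme above documents `getAuthToken` and `getRegistryUrl` separately; the following is a small illustrative sketch of combining them to build an `Authorization` header (the `@myscope` scope and the header shape are assumptions, not part of the vendored readme):

```js
// Sketch: resolve the registry for a scope, then attach its credentials.
var getAuthToken = require('registry-auth-token')
var getRegistryUrl = require('registry-auth-token/registry-url')

var registryUrl = getRegistryUrl('@myscope')            // falls back to the global registry
var auth = getAuthToken(registryUrl, {recursive: true}) // {token, type} or undefined

var headers = {}
if (auth) {
  // type is 'Bearer' or 'Basic', as described under "Return value" above
  headers.Authorization = auth.type + ' ' + auth.token
}
```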
diff --git a/node_modules/registry-auth-token/base64.js b/node_modules/registry-auth-token/base64.js new file mode 100644 index 00000000..d208ae18 --- /dev/null +++ b/node_modules/registry-auth-token/base64.js @@ -0,0 +1,14 @@ +const safeBuffer = require('safe-buffer').Buffer + +function decodeBase64 (base64) { + return safeBuffer.from(base64, 'base64').toString('utf8') +} + +function encodeBase64 (string) { + return safeBuffer.from(string, 'utf8').toString('base64') +} + +module.exports = { + decodeBase64: decodeBase64, + encodeBase64: encodeBase64 +} diff --git a/node_modules/registry-auth-token/index.js b/node_modules/registry-auth-token/index.js new file mode 100644 index 00000000..f8c6216e --- /dev/null +++ b/node_modules/registry-auth-token/index.js @@ -0,0 +1,123 @@ +var url = require('url') +var base64 = require('./base64') + +var decodeBase64 = base64.decodeBase64 +var encodeBase64 = base64.encodeBase64 + +var tokenKey = ':_authToken' +var userKey = ':username' +var passwordKey = ':_password' + +module.exports = function () { + var checkUrl + var options + if (arguments.length >= 2) { + checkUrl = arguments[0] + options = arguments[1] + } else if (typeof arguments[0] === 'string') { + checkUrl = arguments[0] + } else { + options = arguments[0] + } + options = options || {} + options.npmrc = options.npmrc || require('rc')('npm', {registry: 'https://registry.npmjs.org/'}) + checkUrl = checkUrl || options.npmrc.registry + return getRegistryAuthInfo(checkUrl, options) || getLegacyAuthInfo(options.npmrc) +} + +function getRegistryAuthInfo (checkUrl, options) { + var parsed = url.parse(checkUrl, false, true) + var pathname + + while (pathname !== '/' && parsed.pathname !== pathname) { + pathname = parsed.pathname || '/' + + var regUrl = '//' + parsed.host + pathname.replace(/\/$/, '') + var authInfo = getAuthInfoForUrl(regUrl, options.npmrc) + if (authInfo) { + return authInfo + } + + // break if not recursive + if (!options.recursive) { + return /\/$/.test(checkUrl) + ? undefined + : getRegistryAuthInfo(url.resolve(checkUrl, '.'), options) + } + + parsed.pathname = url.resolve(normalizePath(pathname), '..') || '/' + } + + return undefined +} + +function getLegacyAuthInfo (npmrc) { + if (!npmrc._auth) { + return undefined + } + + var token = replaceEnvironmentVariable(npmrc._auth) + + return {token: token, type: 'Basic'} +} + +function normalizePath (path) { + return path[path.length - 1] === '/' ? 
path : path + '/' +} + +function getAuthInfoForUrl (regUrl, npmrc) { + // try to get bearer token + var bearerAuth = getBearerToken(npmrc[regUrl + tokenKey] || npmrc[regUrl + '/' + tokenKey]) + if (bearerAuth) { + return bearerAuth + } + + // try to get basic token + var username = npmrc[regUrl + userKey] || npmrc[regUrl + '/' + userKey] + var password = npmrc[regUrl + passwordKey] || npmrc[regUrl + '/' + passwordKey] + var basicAuth = getTokenForUsernameAndPassword(username, password) + if (basicAuth) { + return basicAuth + } + + return undefined +} + +function replaceEnvironmentVariable (token) { + return token.replace(/^\$\{?([^}]*)\}?$/, function (fullMatch, envVar) { + return process.env[envVar] + }) +} + +function getBearerToken (tok) { + if (!tok) { + return undefined + } + + // check if bearer token is set as environment variable + var token = replaceEnvironmentVariable(tok) + + return {token: token, type: 'Bearer'} +} + +function getTokenForUsernameAndPassword (username, password) { + if (!username || !password) { + return undefined + } + + // passwords are base64 encoded, so we need to decode it + // See https://github.com/npm/npm/blob/v3.10.6/lib/config/set-credentials-by-uri.js#L26 + var pass = decodeBase64(replaceEnvironmentVariable(password)) + + // a basic auth token is base64 encoded 'username:password' + // See https://github.com/npm/npm/blob/v3.10.6/lib/config/get-credentials-by-uri.js#L70 + var token = encodeBase64(username + ':' + pass) + + // we found a basicToken token so let's exit the loop + return { + token: token, + type: 'Basic', + password: pass, + username: username + } +} diff --git a/node_modules/registry-auth-token/node_modules/.bin/rc b/node_modules/registry-auth-token/node_modules/.bin/rc new file mode 120000 index 00000000..e63a3f2a --- /dev/null +++ b/node_modules/registry-auth-token/node_modules/.bin/rc @@ -0,0 +1 @@ +../../../rc/cli.js \ No newline at end of file diff --git a/node_modules/registry-auth-token/node_modules/safe-buffer/LICENSE b/node_modules/registry-auth-token/node_modules/safe-buffer/LICENSE new file mode 100644 index 00000000..0c068cee --- /dev/null +++ b/node_modules/registry-auth-token/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
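Before safe-buffer's own docs, a worked example (with made-up credentials) of the Basic-auth encoding performed by `getTokenForUsernameAndPassword` in the `registry-auth-token/index.js` above: the `_password` value in `.npmrc` is stored base64 encoded, and the resulting token is `base64('username:password')`.

```js
// Sketch of the encoding round-trip; 'alice' / 's3cret' are placeholders.
var Buffer = require('safe-buffer').Buffer

var npmrcPassword = Buffer.from('s3cret', 'utf8').toString('base64')      // 'czNjcmV0' (as stored in .npmrc)
var plainPassword = Buffer.from(npmrcPassword, 'base64').toString('utf8') // 's3cret'
var token = Buffer.from('alice:' + plainPassword, 'utf8').toString('base64')

console.log(token) // 'YWxpY2U6czNjcmV0', sent as 'Basic YWxpY2U6czNjcmV0'
```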
diff --git a/node_modules/registry-auth-token/node_modules/safe-buffer/README.md b/node_modules/registry-auth-token/node_modules/safe-buffer/README.md new file mode 100644 index 00000000..356e3519 --- /dev/null +++ b/node_modules/registry-auth-token/node_modules/safe-buffer/README.md @@ -0,0 +1,586 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +[Get supported safe-buffer with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-safe-buffer?utm_source=npm-safe-buffer&utm_medium=referral&utm_campaign=readme) + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. 
+ +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. 
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. 
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. 
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. 
But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/registry-auth-token/node_modules/safe-buffer/index.d.ts b/node_modules/registry-auth-token/node_modules/safe-buffer/index.d.ts new file mode 100644 index 00000000..e9fed809 --- /dev/null +++ b/node_modules/registry-auth-token/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: 
boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. 
+ * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/registry-auth-token/node_modules/safe-buffer/index.js b/node_modules/registry-auth-token/node_modules/safe-buffer/index.js new file mode 100644 index 00000000..054c8d30 --- /dev/null +++ b/node_modules/registry-auth-token/node_modules/safe-buffer/index.js @@ -0,0 +1,64 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/registry-auth-token/node_modules/safe-buffer/package.json 
b/node_modules/registry-auth-token/node_modules/safe-buffer/package.json new file mode 100644 index 00000000..d532dafb --- /dev/null +++ b/node_modules/registry-auth-token/node_modules/safe-buffer/package.json @@ -0,0 +1,37 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.2.0", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/registry-auth-token/package.json b/node_modules/registry-auth-token/package.json new file mode 100644 index 00000000..1f636b56 --- /dev/null +++ b/node_modules/registry-auth-token/package.json @@ -0,0 +1,46 @@ +{ + "name": "registry-auth-token", + "version": "3.4.0", + "description": "Get the auth token set for an npm registry (if any)", + "main": "index.js", + "scripts": { + "test": "mocha", + "posttest": "standard", + "coverage": "istanbul cover _mocha" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/rexxars/registry-auth-token.git" + }, + "keywords": [ + "npm", + "conf", + "config", + "npmconf", + "registry", + "auth", + "token", + "authtoken" + ], + "author": "Espen Hovlandsdal ", + "license": "MIT", + "bugs": { + "url": "https://github.com/rexxars/registry-auth-token/issues" + }, + "homepage": "https://github.com/rexxars/registry-auth-token#readme", + "dependencies": { + "rc": "^1.1.6", + "safe-buffer": "^5.0.1" + }, + "devDependencies": { + "istanbul": "^0.4.2", + "mocha": "^3.3.0", + "require-uncached": "^1.0.2", + "standard": "^10.0.2" + }, + "standard": { + "ignore": [ + "coverage/**" + ] + } +} diff --git a/node_modules/registry-auth-token/registry-url.js b/node_modules/registry-auth-token/registry-url.js new file mode 100644 index 00000000..9da9a44b --- /dev/null +++ b/node_modules/registry-auth-token/registry-url.js @@ -0,0 +1,5 @@ +module.exports = function (scope, npmrc) { + var rc = npmrc || require('rc')('npm', {registry: 'https://registry.npmjs.org/'}) + var url = rc[scope + ':registry'] || rc.registry + return url.slice(-1) === '/' ? 
url : url + '/' +} diff --git a/node_modules/registry-auth-token/test/auth-token.test.js b/node_modules/registry-auth-token/test/auth-token.test.js new file mode 100644 index 00000000..5db6f5a1 --- /dev/null +++ b/node_modules/registry-auth-token/test/auth-token.test.js @@ -0,0 +1,455 @@ +var fs = require('fs') +var path = require('path') +var mocha = require('mocha') +var assert = require('assert') +var requireUncached = require('require-uncached') + +var npmRcPath = path.join(__dirname, '..', '.npmrc') +var afterEach = mocha.afterEach +var describe = mocha.describe +var it = mocha.it + +var base64 = require('../base64') +var decodeBase64 = base64.decodeBase64 +var encodeBase64 = base64.encodeBase64 + +/* eslint max-nested-callbacks: ["error", 4] */ + +describe('auth-token', function () { + afterEach(function (done) { + fs.unlink(npmRcPath, function () { + done() + }) + }) + + it('should read global if no local is found', function () { + var getAuthToken = requireUncached('../index') + getAuthToken() + }) + + it('should return undefined if no auth token is given for registry', function (done) { + fs.writeFile(npmRcPath, 'registry=http://registry.npmjs.eu/', function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert(!getAuthToken()) + done() + }) + }) + + describe('legacy auth token', function () { + it('should return auth token if it is defined in the legacy way via the `_auth` key', function (done) { + var content = [ + '_auth=foobar', + 'registry=http://registry.foobar.eu/' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual(getAuthToken(), {token: 'foobar', type: 'Basic'}) + done() + }) + }) + + it('should return legacy auth token defined by reference to an environment variable (with curly braces)', function (done) { + var environmentVariable = '__REGISTRY_AUTH_TOKEN_NPM_TOKEN__' + var content = [ + '_auth=${' + environmentVariable + '}', + 'registry=http://registry.foobar.eu/' + ].join('\n') + + process.env[environmentVariable] = 'foobar' + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual(getAuthToken(), {token: 'foobar', type: 'Basic'}) + delete process.env[environmentVariable] + done() + }) + }) + + it('should return legacy auth token defined by reference to an environment variable (without curly braces)', function (done) { + var environmentVariable = '__REGISTRY_AUTH_TOKEN_NPM_TOKEN__' + var content = [ + '_auth=$' + environmentVariable, + 'registry=http://registry.foobar.eu/' + ].join('\n') + + process.env[environmentVariable] = 'foobar' + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual(getAuthToken(), {token: 'foobar', type: 'Basic'}) + delete process.env[environmentVariable] + done() + }) + }) + }) + + describe('bearer token', function () { + it('should return auth token if registry is defined', function (done) { + var content = [ + 'registry=http://registry.foobar.eu/', + '//registry.foobar.eu/:_authToken=foobar', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual(getAuthToken(), {token: 'foobar', type: 'Bearer'}) + done() + }) + }) + + it('should use npmrc passed in', function (done) { + var content = [ + 'registry=http://registry.foobar.eu/', + 
'//registry.foobar.eu/:_authToken=foobar', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + const npmrc = { + 'registry': 'http://registry.foobar.eu/', + '//registry.foobar.eu/:_authToken': 'qar' + } + assert.deepEqual(getAuthToken({npmrc: npmrc}), {token: 'qar', type: 'Bearer'}) + done() + }) + }) + + it('should return auth token if registry url has port specified', function (done) { + var content = [ + 'registry=http://localhost:8770/', + // before the patch this token was selected. + '//localhost/:_authToken=ohno', + '//localhost:8770/:_authToken=beepboop', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual(getAuthToken(), {token: 'beepboop', type: 'Bearer'}) + done() + }) + }) + + it('should return auth token defined by reference to an environment variable (with curly braces)', function (done) { + var environmentVariable = '__REGISTRY_AUTH_TOKEN_NPM_TOKEN__' + var content = [ + 'registry=http://registry.foobar.cc/', + '//registry.foobar.cc/:_authToken=${' + environmentVariable + '}', '' + ].join('\n') + process.env[environmentVariable] = 'foobar' + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual(getAuthToken(), {token: 'foobar', type: 'Bearer'}) + delete process.env[environmentVariable] + done() + }) + }) + + it('should return auth token defined by reference to an environment variable (without curly braces)', function (done) { + var environmentVariable = '__REGISTRY_AUTH_TOKEN_NPM_TOKEN__' + var content = [ + 'registry=http://registry.foobar.cc/', + '//registry.foobar.cc/:_authToken=$' + environmentVariable, '' + ].join('\n') + process.env[environmentVariable] = 'foobar' + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual(getAuthToken(), {token: 'foobar', type: 'Bearer'}) + delete process.env[environmentVariable] + done() + }) + }) + + it('should try with and without a slash at the end of registry url', function (done) { + var content = [ + 'registry=http://registry.foobar.eu', + '//registry.foobar.eu:_authToken=barbaz', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual(getAuthToken(), {token: 'barbaz', type: 'Bearer'}) + done() + }) + }) + + it('should fetch for the registry given (if defined)', function (done) { + var content = [ + '//registry.foobar.eu:_authToken=barbaz', + '//registry.blah.foo:_authToken=whatev', + '//registry.last.thing:_authToken=yep', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual(getAuthToken('//registry.blah.foo'), {token: 'whatev', type: 'Bearer'}) + done() + }) + }) + + it('recursively finds registries for deep url if option is set', function (done, undef) { + var opts = {recursive: true} + var content = [ + '//registry.blah.com/foo:_authToken=whatev', + '//registry.blah.org/foo/bar:_authToken=recurseExactlyOneLevel', + '//registry.blah.edu/foo/bar/baz:_authToken=recurseNoLevel', + '//registry.blah.eu:_authToken=yep', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + 
assert.deepEqual(getAuthToken('https://registry.blah.edu/foo/bar/baz', opts), {token: 'recurseNoLevel', type: 'Bearer'}) + assert.deepEqual(getAuthToken('https://registry.blah.org/foo/bar/baz', opts), {token: 'recurseExactlyOneLevel', type: 'Bearer'}) + assert.deepEqual(getAuthToken('https://registry.blah.com/foo/bar/baz', opts), {token: 'whatev', type: 'Bearer'}) + assert.deepEqual(getAuthToken('http://registry.blah.eu/what/ever', opts), {token: 'yep', type: 'Bearer'}) + assert.deepEqual(getAuthToken('http://registry.blah.eu//what/ever', opts), undefined, 'does not hang') + assert.equal(getAuthToken('//some.registry', opts), undef) + done() + }) + }) + + it('should try both with and without trailing slash', function (done) { + fs.writeFile(npmRcPath, '//registry.blah.com:_authToken=whatev', function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual(getAuthToken('https://registry.blah.com'), {token: 'whatev', type: 'Bearer'}) + done() + }) + }) + + it('should prefer bearer token over basic token', function (done) { + var content = [ + 'registry=http://registry.foobar.eu/', + 'registry=http://registry.foobar.eu/', + '//registry.foobar.eu/:_authToken=bearerToken', + '//registry.foobar.eu/:_password=' + encodeBase64('foobar'), + '//registry.foobar.eu/:username=foobar', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual(getAuthToken('//registry.foobar.eu'), {token: 'bearerToken', type: 'Bearer'}) + done() + }) + }) + + it('"nerf darts" registry urls', function (done, undef) { + fs.writeFile(npmRcPath, '//contoso.pkgs.visualstudio.com/_packaging/MyFeed/npm/:_authToken=heider', function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.deepEqual( + getAuthToken('https://contoso.pkgs.visualstudio.com/_packaging/MyFeed/npm/registry'), + {token: 'heider', type: 'Bearer'} + ) + done() + }) + }) + }) + + describe('basic token', function () { + it('should return undefined if password or username are missing', function (done, undef) { + var content = [ + 'registry=http://registry.foobar.eu/', + '//registry.foobar.eu/:_password=' + encodeBase64('foobar'), + '//registry.foobar.com/:username=foobar', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + assert.equal(getAuthToken('//registry.foobar.eu'), undef) + assert.equal(getAuthToken('//registry.foobar.com'), undef) + done() + }) + }) + + it('should return basic token if username and password are defined', function (done) { + var content = [ + 'registry=http://registry.foobar.eu/', + '//registry.foobar.eu/:_password=' + encodeBase64('foobar'), + '//registry.foobar.eu/:username=foobar', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + var token = getAuthToken() + assert.deepEqual(token, { + token: 'Zm9vYmFyOmZvb2Jhcg==', + type: 'Basic', + username: 'foobar', + password: 'foobar' + }) + assert.equal(decodeBase64(token.token), 'foobar:foobar') + done() + }) + }) + + it('should return basic token if registry url has port specified', function (done) { + var content = [ + 'registry=http://localhost:8770/', + // before the patch this token was selected. 
+ '//localhost/:_authToken=ohno', + '//localhost:8770/:_password=' + encodeBase64('foobar'), + '//localhost:8770/:username=foobar', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + var token = getAuthToken() + assert.deepEqual(token, { + token: 'Zm9vYmFyOmZvb2Jhcg==', + type: 'Basic', + username: 'foobar', + password: 'foobar' + }) + assert.equal(decodeBase64(token.token), 'foobar:foobar') + done() + }) + }) + + it('should return password defined by reference to an environment variable (with curly braces)', function (done) { + var environmentVariable = '__REGISTRY_PASSWORD__' + var content = [ + 'registry=http://registry.foobar.cc/', + '//registry.foobar.cc/:username=username', + '//registry.foobar.cc/:_password=${' + environmentVariable + '}', '' + ].join('\n') + process.env[environmentVariable] = encodeBase64('password') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + var token = getAuthToken() + assert.deepEqual(token, { + type: 'Basic', + username: 'username', + password: 'password', + token: 'dXNlcm5hbWU6cGFzc3dvcmQ=' + }) + assert.equal(decodeBase64(token.token), 'username:password') + delete process.env[environmentVariable] + done() + }) + }) + + it('should return password defined by reference to an environment variable (without curly braces)', function (done) { + var environmentVariable = '__REGISTRY_PASSWORD__' + var content = [ + 'registry=http://registry.foobar.cc/', + '//registry.foobar.cc/:username=username', + '//registry.foobar.cc/:_password=$' + environmentVariable, '' + ].join('\n') + process.env[environmentVariable] = encodeBase64('password') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + var token = getAuthToken() + assert.deepEqual(token, { + type: 'Basic', + username: 'username', + password: 'password', + token: 'dXNlcm5hbWU6cGFzc3dvcmQ=' + }) + assert.equal(decodeBase64(token.token), 'username:password') + delete process.env[environmentVariable] + done() + }) + }) + + it('should try with and without a slash at the end of registry url', function (done) { + var content = [ + 'registry=http://registry.foobar.eu', + '//registry.foobar.eu:_password=' + encodeBase64('barbay'), + '//registry.foobar.eu:username=barbaz', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + var token = getAuthToken() + assert.deepEqual(token, { + token: 'YmFyYmF6OmJhcmJheQ==', + type: 'Basic', + password: 'barbay', + username: 'barbaz' + }) + assert.equal(decodeBase64(token.token), 'barbaz:barbay') + done() + }) + }) + + it('should fetch for the registry given (if defined)', function (done) { + var content = [ + '//registry.foobar.eu:_authToken=barbaz', + '//registry.blah.foo:_password=' + encodeBase64('barbay'), + '//registry.blah.foo:username=barbaz', + '//registry.last.thing:_authToken=yep', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + var token = getAuthToken('//registry.blah.foo') + assert.deepEqual(token, { + token: 'YmFyYmF6OmJhcmJheQ==', + type: 'Basic', + password: 'barbay', + username: 'barbaz' + }) + assert.equal(decodeBase64(token.token), 'barbaz:barbay') + done() + }) + }) + + it('recursively finds registries for deep url if option is set', function 
(done, undef) { + var opts = {recursive: true} + var content = [ + '//registry.blah.com/foo:_password=' + encodeBase64('barbay'), + '//registry.blah.com/foo:username=barbaz', + '//registry.blah.eu:username=barbaz', + '//registry.blah.eu:_password=' + encodeBase64('foobaz'), '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getAuthToken = requireUncached('../index') + assert(!err, err) + var token = getAuthToken('https://registry.blah.com/foo/bar/baz', opts) + assert.deepEqual(token, { + token: 'YmFyYmF6OmJhcmJheQ==', + type: 'Basic', + password: 'barbay', + username: 'barbaz' + }) + assert.equal(decodeBase64(token.token), 'barbaz:barbay') + token = getAuthToken('https://registry.blah.eu/foo/bar/baz', opts) + assert.deepEqual(token, { + token: 'YmFyYmF6OmZvb2Jheg==', + type: 'Basic', + password: 'foobaz', + username: 'barbaz' + }) + assert.equal(decodeBase64(token.token), 'barbaz:foobaz') + assert.equal(getAuthToken('//some.registry', opts), undef) + done() + }) + }) + }) +}) diff --git a/node_modules/registry-auth-token/test/registry-url.test.js b/node_modules/registry-auth-token/test/registry-url.test.js new file mode 100644 index 00000000..adb6951b --- /dev/null +++ b/node_modules/registry-auth-token/test/registry-url.test.js @@ -0,0 +1,64 @@ +var fs = require('fs') +var path = require('path') +var mocha = require('mocha') +var assert = require('assert') +var requireUncached = require('require-uncached') + +var npmRcPath = path.join(__dirname, '..', '.npmrc') +var afterEach = mocha.afterEach +var describe = mocha.describe +var it = mocha.it + +describe('registry-url', function () { + afterEach(function (done) { + fs.unlink(npmRcPath, function () { + done() + }) + }) + + it('should read global if no local is found', function () { + var getRegistryUrl = requireUncached('../registry-url') + getRegistryUrl() + }) + + it('should return default registry if no url is given for scope', function (done) { + fs.writeFile(npmRcPath, 'registry=https://registry.npmjs.org/', function (err) { + var getRegistryUrl = requireUncached('../registry-url') + assert(!err, err) + assert.equal(getRegistryUrl('@somescope'), 'https://registry.npmjs.org/') + done() + }) + }) + + it('should return registry url if url is given for scope ', function (done) { + fs.writeFile(npmRcPath, '@somescope:registry=https://some.registry/', function (err) { + var getRegistryUrl = requireUncached('../registry-url') + assert(!err, err) + assert.equal(getRegistryUrl('@somescope'), 'https://some.registry/') + done() + }) + }) + + it('should append trailing slash if not present', function (done) { + fs.writeFile(npmRcPath, '@somescope:registry=https://some.registry', function (err) { + var getRegistryUrl = requireUncached('../registry-url') + assert(!err, err) + assert.equal(getRegistryUrl('@somescope'), 'https://some.registry/') + done() + }) + }) + + it('should return configured global registry if given', function (done) { + var content = [ + 'registry=http://registry.foobar.eu/', + '@somescope:registry=https://some.url/', '' + ].join('\n') + + fs.writeFile(npmRcPath, content, function (err) { + var getRegistryUrl = requireUncached('../registry-url') + assert(!err, err) + assert.equal(getRegistryUrl(), 'http://registry.foobar.eu/') + done() + }) + }) +}) diff --git a/node_modules/registry-auth-token/yarn.lock b/node_modules/registry-auth-token/yarn.lock new file mode 100644 index 00000000..46c13572 --- /dev/null +++ b/node_modules/registry-auth-token/yarn.lock @@ -0,0 +1,1516 @@ +# THIS IS AN AUTOGENERATED 
FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +abbrev@1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + +abbrev@1.0.x: + version "1.0.9" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" + +acorn-jsx@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-3.0.1.tgz#afdf9488fb1ecefc8348f6fb22f464e32a58b36b" + dependencies: + acorn "^3.0.4" + +acorn@^3.0.4: + version "3.3.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" + +acorn@^5.2.1: + version "5.3.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.3.0.tgz#7446d39459c54fb49a80e6ee6478149b940ec822" + +ajv-keywords@^1.0.0: + version "1.5.1" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-1.5.1.tgz#314dd0a4b3368fad3dfcdc54ede6171b886daf3c" + +ajv@^4.7.0: + version "4.11.8" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536" + dependencies: + co "^4.6.0" + json-stable-stringify "^1.0.1" + +align-text@^0.1.1, align-text@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" + dependencies: + kind-of "^3.0.2" + longest "^1.0.1" + repeat-string "^1.5.2" + +amdefine@>=0.0.4: + version "1.0.1" + resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" + +ansi-escapes@^1.1.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-1.4.0.tgz#d3a8a83b319aa67793662b13e761c7911422306e" + +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + +ansi-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + +ansi-styles@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + +argparse@^1.0.7: + version "1.0.9" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.9.tgz#73d83bc263f86e97f8cc4f6bae1b0e90a7d22c86" + dependencies: + sprintf-js "~1.0.2" + +array-union@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" + dependencies: + array-uniq "^1.0.1" + +array-uniq@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" + +array.prototype.find@^2.0.1: + version "2.0.4" + resolved "https://registry.yarnpkg.com/array.prototype.find/-/array.prototype.find-2.0.4.tgz#556a5c5362c08648323ddaeb9de9d14bc1864c90" + dependencies: + define-properties "^1.1.2" + es-abstract "^1.7.0" + +arrify@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + +async@1.x, async@^1.4.0: + version "1.5.2" + resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" + +babel-code-frame@^6.16.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" + dependencies: + chalk "^1.1.3" + esutils "^2.0.2" + 
js-tokens "^3.0.2" + +balanced-match@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + +brace-expansion@^1.1.7: + version "1.1.8" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.8.tgz#c07b211c7c952ec1f8efd51a77ef0d1d3990a292" + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +browser-stdout@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.0.tgz#f351d32969d32fa5d7a5567154263d928ae3bd1f" + +builtin-modules@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" + +caller-path@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-0.1.0.tgz#94085ef63581ecd3daa92444a8fe94e82577751f" + dependencies: + callsites "^0.2.0" + +callsites@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-0.2.0.tgz#afab96262910a7f33c19a5775825c69f34e350ca" + +camelcase@^1.0.2: + version "1.2.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" + +center-align@^0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" + dependencies: + align-text "^0.1.3" + lazy-cache "^1.0.3" + +chalk@^1.0.0, chalk@^1.1.1, chalk@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + dependencies: + ansi-styles "^2.2.1" + escape-string-regexp "^1.0.2" + has-ansi "^2.0.0" + strip-ansi "^3.0.0" + supports-color "^2.0.0" + +circular-json@^0.3.1: + version "0.3.3" + resolved "https://registry.yarnpkg.com/circular-json/-/circular-json-0.3.3.tgz#815c99ea84f6809529d2f45791bdf82711352d66" + +cli-cursor@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-1.0.2.tgz#64da3f7d56a54412e59794bd62dc35295e8f2987" + dependencies: + restore-cursor "^1.0.1" + +cli-width@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.0.tgz#ff19ede8a9a5e579324147b0c11f0fbcbabed639" + +cliui@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" + dependencies: + center-align "^0.1.1" + right-align "^0.1.1" + wordwrap "0.0.2" + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + +code-point-at@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + +commander@2.9.0: + version "2.9.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.9.0.tgz#9c99094176e12240cb22d6c5146098400fe0f7d4" + dependencies: + graceful-readlink ">= 1.0.0" + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + +concat-stream@^1.5.2: + version "1.6.0" + resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.0.tgz#0aac662fd52be78964d5532f694784e70110acf7" + dependencies: + inherits "^2.0.3" + readable-stream "^2.2.2" + typedarray "^0.0.6" + +contains-path@^0.1.0: + version "0.1.0" + resolved 
"https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a" + +core-util-is@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + +d@1: + version "1.0.0" + resolved "https://registry.yarnpkg.com/d/-/d-1.0.0.tgz#754bb5bfe55451da69a58b94d45f4c5b0462d58f" + dependencies: + es5-ext "^0.10.9" + +debug-log@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/debug-log/-/debug-log-1.0.1.tgz#2307632d4c04382b8df8a32f70b895046d52745f" + +debug@2.6.8: + version "2.6.8" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.8.tgz#e731531ca2ede27d188222427da17821d68ff4fc" + dependencies: + ms "2.0.0" + +debug@^2.1.1, debug@^2.2.0, debug@^2.6.8: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + dependencies: + ms "2.0.0" + +decamelize@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + +deep-extend@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" + +deep-is@~0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" + +define-properties@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.2.tgz#83a73f2fea569898fb737193c8f873caf6d45c94" + dependencies: + foreach "^2.0.5" + object-keys "^1.0.8" + +deglob@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/deglob/-/deglob-2.1.0.tgz#4d44abe16ef32c779b4972bd141a80325029a14a" + dependencies: + find-root "^1.0.0" + glob "^7.0.5" + ignore "^3.0.9" + pkg-config "^1.1.0" + run-parallel "^1.1.2" + uniq "^1.0.1" + +del@^2.0.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/del/-/del-2.2.2.tgz#c12c981d067846c84bcaf862cff930d907ffd1a8" + dependencies: + globby "^5.0.0" + is-path-cwd "^1.0.0" + is-path-in-cwd "^1.0.0" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + rimraf "^2.2.8" + +diff@3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-3.2.0.tgz#c9ce393a4b7cbd0b058a725c93df299027868ff9" + +doctrine@1.5.0, doctrine@^1.2.2: + version "1.5.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-1.5.0.tgz#379dce730f6166f76cefa4e6707a159b02c5a6fa" + dependencies: + esutils "^2.0.2" + isarray "^1.0.0" + +doctrine@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + dependencies: + esutils "^2.0.2" + +error-ex@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.1.tgz#f855a86ce61adc4e8621c3cda21e7a7612c3a8dc" + dependencies: + is-arrayish "^0.2.1" + +es-abstract@^1.7.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.10.0.tgz#1ecb36c197842a00d8ee4c2dfd8646bb97d60864" + dependencies: + es-to-primitive "^1.1.1" + function-bind "^1.1.1" + has "^1.0.1" + is-callable "^1.1.3" + is-regex "^1.0.4" + +es-to-primitive@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.1.1.tgz#45355248a88979034b6792e19bb81f2b7975dd0d" + dependencies: + is-callable "^1.1.1" + is-date-object "^1.0.1" + is-symbol "^1.0.1" + +es5-ext@^0.10.14, 
es5-ext@^0.10.35, es5-ext@^0.10.9, es5-ext@~0.10.14: + version "0.10.38" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.38.tgz#fa7d40d65bbc9bb8a67e1d3f9cc656a00530eed3" + dependencies: + es6-iterator "~2.0.3" + es6-symbol "~3.1.1" + +es6-iterator@^2.0.1, es6-iterator@~2.0.1, es6-iterator@~2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" + dependencies: + d "1" + es5-ext "^0.10.35" + es6-symbol "^3.1.1" + +es6-map@^0.1.3: + version "0.1.5" + resolved "https://registry.yarnpkg.com/es6-map/-/es6-map-0.1.5.tgz#9136e0503dcc06a301690f0bb14ff4e364e949f0" + dependencies: + d "1" + es5-ext "~0.10.14" + es6-iterator "~2.0.1" + es6-set "~0.1.5" + es6-symbol "~3.1.1" + event-emitter "~0.3.5" + +es6-set@~0.1.5: + version "0.1.5" + resolved "https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.5.tgz#d2b3ec5d4d800ced818db538d28974db0a73ccb1" + dependencies: + d "1" + es5-ext "~0.10.14" + es6-iterator "~2.0.1" + es6-symbol "3.1.1" + event-emitter "~0.3.5" + +es6-symbol@3.1.1, es6-symbol@^3.1.1, es6-symbol@~3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.1.tgz#bf00ef4fdab6ba1b46ecb7b629b4c7ed5715cc77" + dependencies: + d "1" + es5-ext "~0.10.14" + +es6-weak-map@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.2.tgz#5e3ab32251ffd1538a1f8e5ffa1357772f92d96f" + dependencies: + d "1" + es5-ext "^0.10.14" + es6-iterator "^2.0.1" + es6-symbol "^3.1.1" + +escape-string-regexp@1.0.5, escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + +escodegen@1.8.x: + version "1.8.1" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" + dependencies: + esprima "^2.7.1" + estraverse "^1.9.1" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.2.0" + +escope@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/escope/-/escope-3.6.0.tgz#e01975e812781a163a6dadfdd80398dc64c889c3" + dependencies: + es6-map "^0.1.3" + es6-weak-map "^2.0.1" + esrecurse "^4.1.0" + estraverse "^4.1.1" + +eslint-config-standard-jsx@4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/eslint-config-standard-jsx/-/eslint-config-standard-jsx-4.0.2.tgz#009e53c4ddb1e9ee70b4650ffe63a7f39f8836e1" + +eslint-config-standard@10.2.1: + version "10.2.1" + resolved "https://registry.yarnpkg.com/eslint-config-standard/-/eslint-config-standard-10.2.1.tgz#c061e4d066f379dc17cd562c64e819b4dd454591" + +eslint-import-resolver-node@^0.2.0: + version "0.2.3" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.2.3.tgz#5add8106e8c928db2cba232bcd9efa846e3da16c" + dependencies: + debug "^2.2.0" + object-assign "^4.0.1" + resolve "^1.1.6" + +eslint-module-utils@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.1.1.tgz#abaec824177613b8a95b299639e1b6facf473449" + dependencies: + debug "^2.6.8" + pkg-dir "^1.0.0" + +eslint-plugin-import@~2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.2.0.tgz#72ba306fad305d67c4816348a4699a4229ac8b4e" + dependencies: + builtin-modules "^1.1.1" + contains-path "^0.1.0" + debug "^2.2.0" + doctrine 
"1.5.0" + eslint-import-resolver-node "^0.2.0" + eslint-module-utils "^2.0.0" + has "^1.0.1" + lodash.cond "^4.3.0" + minimatch "^3.0.3" + pkg-up "^1.0.0" + +eslint-plugin-node@~4.2.2: + version "4.2.3" + resolved "https://registry.yarnpkg.com/eslint-plugin-node/-/eslint-plugin-node-4.2.3.tgz#c04390ab8dbcbb6887174023d6f3a72769e63b97" + dependencies: + ignore "^3.0.11" + minimatch "^3.0.2" + object-assign "^4.0.1" + resolve "^1.1.7" + semver "5.3.0" + +eslint-plugin-promise@~3.5.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-3.5.0.tgz#78fbb6ffe047201627569e85a6c5373af2a68fca" + +eslint-plugin-react@~6.10.0: + version "6.10.3" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-6.10.3.tgz#c5435beb06774e12c7db2f6abaddcbf900cd3f78" + dependencies: + array.prototype.find "^2.0.1" + doctrine "^1.2.2" + has "^1.0.1" + jsx-ast-utils "^1.3.4" + object.assign "^4.0.4" + +eslint-plugin-standard@~3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-standard/-/eslint-plugin-standard-3.0.1.tgz#34d0c915b45edc6f010393c7eef3823b08565cf2" + +eslint@~3.19.0: + version "3.19.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-3.19.0.tgz#c8fc6201c7f40dd08941b87c085767386a679acc" + dependencies: + babel-code-frame "^6.16.0" + chalk "^1.1.3" + concat-stream "^1.5.2" + debug "^2.1.1" + doctrine "^2.0.0" + escope "^3.6.0" + espree "^3.4.0" + esquery "^1.0.0" + estraverse "^4.2.0" + esutils "^2.0.2" + file-entry-cache "^2.0.0" + glob "^7.0.3" + globals "^9.14.0" + ignore "^3.2.0" + imurmurhash "^0.1.4" + inquirer "^0.12.0" + is-my-json-valid "^2.10.0" + is-resolvable "^1.0.0" + js-yaml "^3.5.1" + json-stable-stringify "^1.0.0" + levn "^0.3.0" + lodash "^4.0.0" + mkdirp "^0.5.0" + natural-compare "^1.4.0" + optionator "^0.8.2" + path-is-inside "^1.0.1" + pluralize "^1.2.1" + progress "^1.1.8" + require-uncached "^1.0.2" + shelljs "^0.7.5" + strip-bom "^3.0.0" + strip-json-comments "~2.0.1" + table "^3.7.8" + text-table "~0.2.0" + user-home "^2.0.0" + +espree@^3.4.0: + version "3.5.2" + resolved "https://registry.yarnpkg.com/espree/-/espree-3.5.2.tgz#756ada8b979e9dcfcdb30aad8d1a9304a905e1ca" + dependencies: + acorn "^5.2.1" + acorn-jsx "^3.0.0" + +esprima@2.7.x, esprima@^2.7.1: + version "2.7.3" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" + +esprima@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804" + +esquery@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.0.0.tgz#cfba8b57d7fba93f17298a8a006a04cda13d80fa" + dependencies: + estraverse "^4.0.0" + +esrecurse@^4.1.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.0.tgz#fa9568d98d3823f9a41d91e902dcab9ea6e5b163" + dependencies: + estraverse "^4.1.0" + object-assign "^4.0.1" + +estraverse@^1.9.1: + version "1.9.3" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" + +estraverse@^4.0.0, estraverse@^4.1.0, estraverse@^4.1.1, estraverse@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" + +esutils@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" + +event-emitter@~0.3.5: + version 
"0.3.5" + resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" + dependencies: + d "1" + es5-ext "~0.10.14" + +exit-hook@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/exit-hook/-/exit-hook-1.1.1.tgz#f05ca233b48c05d54fff07765df8507e95c02ff8" + +fast-levenshtein@~2.0.4: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + +figures@^1.3.5: + version "1.7.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-1.7.0.tgz#cbe1e3affcf1cd44b80cadfed28dc793a9701d2e" + dependencies: + escape-string-regexp "^1.0.5" + object-assign "^4.1.0" + +file-entry-cache@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-2.0.0.tgz#c392990c3e684783d838b8c84a45d8a048458361" + dependencies: + flat-cache "^1.2.1" + object-assign "^4.0.1" + +find-root@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4" + +find-up@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" + dependencies: + path-exists "^2.0.0" + pinkie-promise "^2.0.0" + +find-up@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + dependencies: + locate-path "^2.0.0" + +flat-cache@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.3.0.tgz#d3030b32b38154f4e3b7e9c709f490f7ef97c481" + dependencies: + circular-json "^0.3.1" + del "^2.0.2" + graceful-fs "^4.1.2" + write "^0.2.1" + +foreach@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + +function-bind@^1.0.2, function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + +generate-function@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/generate-function/-/generate-function-2.0.0.tgz#6858fe7c0969b7d4e9093337647ac79f60dfbe74" + +generate-object-property@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/generate-object-property/-/generate-object-property-1.2.0.tgz#9c0e1c40308ce804f4783618b937fa88f99d50d0" + dependencies: + is-property "^1.0.0" + +get-stdin@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-5.0.1.tgz#122e161591e21ff4c52530305693f20e6393a398" + +glob@7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.1.tgz#805211df04faaf1c63a3600306cdf5ade50b2ec8" + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.2" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@^5.0.15: + version "5.0.15" + resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" + dependencies: + inflight "^1.0.4" + inherits "2" + minimatch "2 || 3" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@^7.0.0, glob@^7.0.3, glob@^7.0.5: + version "7.1.2" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" + dependencies: + 
fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^9.14.0: + version "9.18.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" + +globby@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-5.0.0.tgz#ebd84667ca0dbb330b99bcfc68eac2bc54370e0d" + dependencies: + array-union "^1.0.1" + arrify "^1.0.0" + glob "^7.0.3" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + +graceful-fs@^4.1.2: + version "4.1.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" + +"graceful-readlink@>= 1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/graceful-readlink/-/graceful-readlink-1.0.1.tgz#4cafad76bc62f02fa039b2f94e9a3dd3a391a725" + +growl@1.9.2: + version "1.9.2" + resolved "https://registry.yarnpkg.com/growl/-/growl-1.9.2.tgz#0ea7743715db8d8de2c5ede1775e1b45ac85c02f" + +handlebars@^4.0.1: + version "4.0.11" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.11.tgz#630a35dfe0294bc281edae6ffc5d329fc7982dcc" + dependencies: + async "^1.4.0" + optimist "^0.6.1" + source-map "^0.4.4" + optionalDependencies: + uglify-js "^2.6" + +has-ansi@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + dependencies: + ansi-regex "^2.0.0" + +has-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" + +has-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.0.tgz#ba1a8f1af2a0fc39650f5c850367704122063b44" + +has@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.1.tgz#8461733f538b0837c9361e39a9ab9e9704dc2f28" + dependencies: + function-bind "^1.0.2" + +he@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd" + +ignore@^3.0.11, ignore@^3.0.9, ignore@^3.2.0: + version "3.3.7" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.7.tgz#612289bfb3c220e186a58118618d5be8c1bab021" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@^2.0.3, inherits@~2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + +ini@~1.3.0: + version "1.3.5" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" + +inquirer@^0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-0.12.0.tgz#1ef2bfd63504df0bc75785fff8c2c41df12f077e" + dependencies: + ansi-escapes "^1.1.0" + ansi-regex "^2.0.0" + chalk "^1.0.0" + cli-cursor "^1.0.1" + cli-width "^2.0.0" + figures "^1.3.5" + lodash "^4.3.0" + readline2 "^1.0.1" + run-async "^0.1.0" + rx-lite "^3.1.2" + string-width "^1.0.1" + strip-ansi "^3.0.0" + through "^2.3.6" + +interpret@^1.0.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/interpret/-/interpret-1.1.0.tgz#7ed1b1410c6a0e0f78cf95d3b8440c63f78b8614" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + +is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + +is-callable@^1.1.1, is-callable@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.3.tgz#86eb75392805ddc33af71c92a0eedf74ee7604b2" + +is-date-object@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" + +is-fullwidth-code-point@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + +is-my-json-valid@^2.10.0: + version "2.17.1" + resolved "https://registry.yarnpkg.com/is-my-json-valid/-/is-my-json-valid-2.17.1.tgz#3da98914a70a22f0a8563ef1511a246c6fc55471" + dependencies: + generate-function "^2.0.0" + generate-object-property "^1.1.0" + jsonpointer "^4.0.0" + xtend "^4.0.0" + +is-path-cwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" + +is-path-in-cwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-1.0.0.tgz#6477582b8214d602346094567003be8a9eac04dc" + dependencies: + is-path-inside "^1.0.0" + +is-path-inside@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.1.tgz#8ef5b7de50437a3fdca6b4e865ef7aa55cb48036" + dependencies: + path-is-inside "^1.0.1" + +is-property@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-property/-/is-property-1.0.2.tgz#57fe1c4e48474edd65b09911f26b1cd4095dda84" + +is-regex@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" + dependencies: + has "^1.0.1" + +is-resolvable@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" + +is-symbol@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.1.tgz#3cc59f00025194b6ab2e38dbae6689256b660572" + +isarray@^1.0.0, isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + +istanbul@^0.4.2: + version "0.4.5" + resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" + dependencies: + abbrev "1.0.x" + async "1.x" + escodegen "1.8.x" + esprima "2.7.x" + glob "^5.0.15" + handlebars "^4.0.1" + js-yaml "3.x" + mkdirp "0.5.x" + nopt "3.x" + once "1.x" + resolve "1.1.x" + supports-color "^3.1.0" + which "^1.1.1" + wordwrap "^1.0.0" + +js-tokens@^3.0.2: + version "3.0.2" + resolved 
"https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" + +js-yaml@3.x, js-yaml@^3.5.1: + version "3.10.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.10.0.tgz#2e78441646bd4682e963f22b6e92823c309c62dc" + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +json-parse-better-errors@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.1.tgz#50183cd1b2d25275de069e9e71b467ac9eab973a" + +json-stable-stringify@^1.0.0, json-stable-stringify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af" + dependencies: + jsonify "~0.0.0" + +json3@3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.2.tgz#3c0434743df93e2f5c42aee7b19bcb483575f4e1" + +jsonify@~0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" + +jsonpointer@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-4.0.1.tgz#4fd92cb34e0e9db3c89c8622ecf51f9b978c6cb9" + +jsx-ast-utils@^1.3.4: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-1.4.1.tgz#3867213e8dd79bf1e8f2300c0cfc1efb182c0df1" + +kind-of@^3.0.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + dependencies: + is-buffer "^1.1.5" + +lazy-cache@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" + +levn@^0.3.0, levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +load-json-file@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b" + dependencies: + graceful-fs "^4.1.2" + parse-json "^4.0.0" + pify "^3.0.0" + strip-bom "^3.0.0" + +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +lodash._baseassign@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz#8c38a099500f215ad09e59f1722fd0c52bfe0a4e" + dependencies: + lodash._basecopy "^3.0.0" + lodash.keys "^3.0.0" + +lodash._basecopy@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz#8da0e6a876cf344c0ad8a54882111dd3c5c7ca36" + +lodash._basecreate@^3.0.0: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash._basecreate/-/lodash._basecreate-3.0.3.tgz#1bc661614daa7fc311b7d03bf16806a0213cf821" + +lodash._getnative@^3.0.0: + version "3.9.1" + resolved "https://registry.yarnpkg.com/lodash._getnative/-/lodash._getnative-3.9.1.tgz#570bc7dede46d61cdcde687d65d3eecbaa3aaff5" + +lodash._isiterateecall@^3.0.0: + version "3.0.9" + resolved "https://registry.yarnpkg.com/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz#5203ad7ba425fae842460e696db9cf3e6aac057c" + +lodash.cond@^4.3.0: + version "4.5.2" + resolved 
"https://registry.yarnpkg.com/lodash.cond/-/lodash.cond-4.5.2.tgz#f471a1da486be60f6ab955d17115523dd1d255d5" + +lodash.create@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/lodash.create/-/lodash.create-3.1.1.tgz#d7f2849f0dbda7e04682bb8cd72ab022461debe7" + dependencies: + lodash._baseassign "^3.0.0" + lodash._basecreate "^3.0.0" + lodash._isiterateecall "^3.0.0" + +lodash.isarguments@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a" + +lodash.isarray@^3.0.0: + version "3.0.4" + resolved "https://registry.yarnpkg.com/lodash.isarray/-/lodash.isarray-3.0.4.tgz#79e4eb88c36a8122af86f844aa9bcd851b5fbb55" + +lodash.keys@^3.0.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/lodash.keys/-/lodash.keys-3.1.2.tgz#4dbc0472b156be50a0b286855d1bd0b0c656098a" + dependencies: + lodash._getnative "^3.0.0" + lodash.isarguments "^3.0.0" + lodash.isarray "^3.0.0" + +lodash@^4.0.0, lodash@^4.3.0: + version "4.17.4" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" + +longest@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" + +"minimatch@2 || 3", minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + dependencies: + brace-expansion "^1.1.7" + +minimist@0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" + +minimist@^1.1.0, minimist@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" + +minimist@~0.0.1: + version "0.0.10" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" + +mkdirp@0.5.1, mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" + dependencies: + minimist "0.0.8" + +mocha@^3.3.0: + version "3.5.3" + resolved "https://registry.yarnpkg.com/mocha/-/mocha-3.5.3.tgz#1e0480fe36d2da5858d1eb6acc38418b26eaa20d" + dependencies: + browser-stdout "1.3.0" + commander "2.9.0" + debug "2.6.8" + diff "3.2.0" + escape-string-regexp "1.0.5" + glob "7.1.1" + growl "1.9.2" + he "1.1.1" + json3 "3.3.2" + lodash.create "3.1.1" + mkdirp "0.5.1" + supports-color "3.1.2" + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + +mute-stream@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.5.tgz#8fbfabb0a98a253d3184331f9e8deb7372fac6c0" + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + +nopt@3.x: + version "3.0.6" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" + dependencies: + abbrev "1" + +number-is-nan@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + +object-assign@^4.0.1, object-assign@^4.1.0: + version "4.1.1" + resolved 
"https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + +object-keys@^1.0.11, object-keys@^1.0.8: + version "1.0.11" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.0.11.tgz#c54601778ad560f1142ce0e01bcca8b56d13426d" + +object.assign@^4.0.4: + version "4.1.0" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" + dependencies: + define-properties "^1.1.2" + function-bind "^1.1.1" + has-symbols "^1.0.0" + object-keys "^1.0.11" + +once@1.x, once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + dependencies: + wrappy "1" + +onetime@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-1.1.0.tgz#a1f7838f8314c516f05ecefcbc4ccfe04b4ed789" + +optimist@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" + dependencies: + minimist "~0.0.1" + wordwrap "~0.0.2" + +optionator@^0.8.1, optionator@^0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.4" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + wordwrap "~1.0.0" + +os-homedir@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + +p-limit@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.2.0.tgz#0e92b6bedcb59f022c13d0f1949dc82d15909f1c" + dependencies: + p-try "^1.0.0" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + dependencies: + p-limit "^1.1.0" + +p-try@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + +parse-json@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" + dependencies: + error-ex "^1.3.1" + json-parse-better-errors "^1.0.1" + +path-exists@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" + dependencies: + pinkie-promise "^2.0.0" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + +path-is-inside@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" + +path-parse@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1" + +pify@^2.0.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + +pify@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" + +pinkie-promise@^2.0.0: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + dependencies: + pinkie "^2.0.0" + +pinkie@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + +pkg-conf@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/pkg-conf/-/pkg-conf-2.1.0.tgz#2126514ca6f2abfebd168596df18ba57867f0058" + dependencies: + find-up "^2.0.0" + load-json-file "^4.0.0" + +pkg-config@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/pkg-config/-/pkg-config-1.1.1.tgz#557ef22d73da3c8837107766c52eadabde298fe4" + dependencies: + debug-log "^1.0.0" + find-root "^1.0.0" + xtend "^4.0.1" + +pkg-dir@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-1.0.0.tgz#7a4b508a8d5bb2d629d447056ff4e9c9314cf3d4" + dependencies: + find-up "^1.0.0" + +pkg-up@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-1.0.0.tgz#3e08fb461525c4421624a33b9f7e6d0af5b05a26" + dependencies: + find-up "^1.0.0" + +pluralize@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-1.2.1.tgz#d1a21483fd22bb41e58a12fa3421823140897c45" + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + +process-nextick-args@~1.0.6: + version "1.0.7" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3" + +progress@^1.1.8: + version "1.1.8" + resolved "https://registry.yarnpkg.com/progress/-/progress-1.1.8.tgz#e260c78f6161cdd9b0e56cc3e0a85de17c7a57be" + +rc@^1.2.8: + version "1.2.8" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" + dependencies: + deep-extend "^0.6.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +readable-stream@^2.2.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.3.tgz#368f2512d79f9d46fdfc71349ae7878bbc1eb95c" + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~1.0.6" + safe-buffer "~5.1.1" + string_decoder "~1.0.3" + util-deprecate "~1.0.1" + +readline2@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/readline2/-/readline2-1.0.1.tgz#41059608ffc154757b715d9989d199ffbf372e35" + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + mute-stream "0.0.5" + +rechoir@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" + dependencies: + resolve "^1.1.6" + +repeat-string@^1.5.2: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + +require-uncached@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/require-uncached/-/require-uncached-1.0.3.tgz#4e0d56d6c9662fd31e43011c4b95aa49955421d3" + dependencies: + caller-path "^0.1.0" + resolve-from "^1.0.0" + +resolve-from@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226" + +resolve@1.1.x: + version "1.1.7" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" + +resolve@^1.1.6, resolve@^1.1.7: + version "1.5.0" + 
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.5.0.tgz#1f09acce796c9a762579f31b2c1cc4c3cddf9f36" + dependencies: + path-parse "^1.0.5" + +restore-cursor@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-1.0.1.tgz#34661f46886327fed2991479152252df92daa541" + dependencies: + exit-hook "^1.0.0" + onetime "^1.0.0" + +right-align@^0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" + dependencies: + align-text "^0.1.1" + +rimraf@^2.2.8: + version "2.6.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36" + dependencies: + glob "^7.0.5" + +run-async@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-0.1.0.tgz#c8ad4a5e110661e402a7d21b530e009f25f8e389" + dependencies: + once "^1.3.0" + +run-parallel@^1.1.2: + version "1.1.6" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.1.6.tgz#29003c9a2163e01e2d2dfc90575f2c6c1d61a039" + +rx-lite@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-3.1.2.tgz#19ce502ca572665f3b647b10939f97fd1615f102" + +safe-buffer@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" + +semver@5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" + +shelljs@^0.7.5: + version "0.7.8" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.7.8.tgz#decbcf874b0d1e5fb72e14b164a9683048e9acb3" + dependencies: + glob "^7.0.0" + interpret "^1.0.0" + rechoir "^0.6.2" + +slice-ansi@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35" + +source-map@^0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" + dependencies: + amdefine ">=0.0.4" + +source-map@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" + dependencies: + amdefine ">=0.0.4" + +source-map@~0.5.1: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + +standard-engine@~7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/standard-engine/-/standard-engine-7.0.0.tgz#ebb77b9c8fc2c8165ffa353bd91ba0dff41af690" + dependencies: + deglob "^2.1.0" + get-stdin "^5.0.1" + minimist "^1.1.0" + pkg-conf "^2.0.0" + +standard@^10.0.2: + version "10.0.3" + resolved "https://registry.yarnpkg.com/standard/-/standard-10.0.3.tgz#7869bcbf422bdeeaab689a1ffb1fea9677dd50ea" + dependencies: + eslint "~3.19.0" + eslint-config-standard "10.2.1" + eslint-config-standard-jsx "4.0.2" + eslint-plugin-import "~2.2.0" + eslint-plugin-node "~4.2.2" + eslint-plugin-promise "~3.5.0" + eslint-plugin-react "~6.10.0" + eslint-plugin-standard "~3.0.1" + standard-engine "~7.0.0" + +string-width@^1.0.1: + version 
"1.0.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + strip-ansi "^3.0.0" + +string-width@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + +string_decoder@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab" + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + dependencies: + ansi-regex "^3.0.0" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + +supports-color@3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.1.2.tgz#72a262894d9d408b956ca05ff37b2ed8a6e2a2d5" + dependencies: + has-flag "^1.0.0" + +supports-color@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + +supports-color@^3.1.0: + version "3.2.3" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" + dependencies: + has-flag "^1.0.0" + +table@^3.7.8: + version "3.8.3" + resolved "https://registry.yarnpkg.com/table/-/table-3.8.3.tgz#2bbc542f0fda9861a755d3947fefd8b3f513855f" + dependencies: + ajv "^4.7.0" + ajv-keywords "^1.0.0" + chalk "^1.1.1" + lodash "^4.0.0" + slice-ansi "0.0.4" + string-width "^2.0.0" + +text-table@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + +through@^2.3.6: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + dependencies: + prelude-ls "~1.1.2" + +typedarray@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + +uglify-js@^2.6: + version "2.8.29" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" + dependencies: + source-map "~0.5.1" + yargs "~3.10.0" + optionalDependencies: + uglify-to-browserify "~1.0.0" + +uglify-to-browserify@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" + +uniq@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" + +user-home@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/user-home/-/user-home-2.0.0.tgz#9c70bfd8169bc1dcbf48604e0f04b8b49cde9e9f" + dependencies: + os-homedir "^1.0.0" + +util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + +which@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.0.tgz#ff04bdfc010ee547d780bec38e1ac1c2777d253a" + dependencies: + isexe "^2.0.0" + +window-size@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" + +wordwrap@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" + +wordwrap@^1.0.0, wordwrap@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + +wordwrap@~0.0.2: + version "0.0.3" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + +write@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/write/-/write-0.2.1.tgz#5fc03828e264cea3fe91455476f7a3c566cb0757" + dependencies: + mkdirp "^0.5.1" + +xtend@^4.0.0, xtend@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" + +yargs@~3.10.0: + version "3.10.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" + dependencies: + camelcase "^1.0.2" + cliui "^2.1.0" + decamelize "^1.0.0" + window-size "0.1.0" diff --git a/node_modules/registry-url/index.js b/node_modules/registry-url/index.js new file mode 100644 index 00000000..5502a235 --- /dev/null +++ b/node_modules/registry-url/index.js @@ -0,0 +1,6 @@ +'use strict'; +module.exports = function (scope) { + var rc = require('rc')('npm', {registry: 'https://registry.npmjs.org/'}); + var url = rc[scope + ':registry'] || rc.registry; + return url.slice(-1) === '/' ? url : url + '/'; +}; diff --git a/node_modules/registry-url/license b/node_modules/registry-url/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/registry-url/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/registry-url/node_modules/.bin/rc b/node_modules/registry-url/node_modules/.bin/rc new file mode 120000 index 00000000..e63a3f2a --- /dev/null +++ b/node_modules/registry-url/node_modules/.bin/rc @@ -0,0 +1 @@ +../../../rc/cli.js \ No newline at end of file diff --git a/node_modules/registry-url/package.json b/node_modules/registry-url/package.json new file mode 100644 index 00000000..05be4327 --- /dev/null +++ b/node_modules/registry-url/package.json @@ -0,0 +1,40 @@ +{ + "name": "registry-url", + "version": "3.1.0", + "description": "Get the set npm registry URL", + "license": "MIT", + "repository": "sindresorhus/registry-url", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava --serial" + }, + "files": [ + "index.js" + ], + "keywords": [ + "npm", + "conf", + "config", + "npmconf", + "registry", + "url", + "uri", + "scope" + ], + "dependencies": { + "rc": "^1.0.1" + }, + "devDependencies": { + "ava": "*", + "pify": "^2.3.0", + "require-uncached": "^1.0.2", + "xo": "*" + } +} diff --git a/node_modules/registry-url/readme.md b/node_modules/registry-url/readme.md new file mode 100644 index 00000000..6fc55bc5 --- /dev/null +++ b/node_modules/registry-url/readme.md @@ -0,0 +1,50 @@ +# registry-url [![Build Status](https://travis-ci.org/sindresorhus/registry-url.svg?branch=master)](https://travis-ci.org/sindresorhus/registry-url) + +> Get the set npm registry URL + +It's usually `https://registry.npmjs.org/`, but [configurable](https://www.npmjs.org/doc/misc/npm-config.html#registry). + +Use this if you do anything with the npm registry as users will expect it to use their configured registry. + + +## Install + +``` +$ npm install --save registry-url +``` + + +## Usage + +```ini +# .npmrc +registry = 'https://custom-registry.com/' +``` + +```js +const registryUrl = require('registry-url'); + +console.log(registryUrl()); +//=> 'https://custom-registry.com/' +``` + +It can also retrieve the registry URL associated with an [npm scope](https://docs.npmjs.com/misc/scope). + +```ini +# .npmrc +@myco:registry = 'https://custom-registry.com/' +``` + +```js +const registryUrl = require('registry-url'); + +console.log(registryUrl('@myco')); +//=> 'https://custom-registry.com/' +``` + +If the provided scope is not in the user's `.npmrc` file, then `registry-url` will check for the existence of `registry`, or if that's not set, fallback to the default npm registry. 
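A minimal sketch of that fallback, assuming the package is installed and that `@other` is a hypothetical scope with no `@other:registry` entry in `.npmrc`:

```js
const registryUrl = require('registry-url');

// No `@other:registry` key is set, so the lookup falls back to the
// top-level `registry` value, and finally to the default npm registry.
console.log(registryUrl('@other'));
//=> 'https://custom-registry.com/' if `registry` is set in .npmrc,
//   otherwise 'https://registry.npmjs.org/'
```

Either way the returned URL keeps a trailing slash, since the `index.js` shown above normalizes it before returning.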
+ + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/remove-trailing-separator/history.md b/node_modules/remove-trailing-separator/history.md new file mode 100644 index 00000000..e15e8a46 --- /dev/null +++ b/node_modules/remove-trailing-separator/history.md @@ -0,0 +1,17 @@ +## History + +### 1.1.0 - 16th Aug 2017 + +- [f4576e3](https://github.com/darsain/remove-trailing-separator/commit/f4576e3638c39b794998b533fffb27854dcbee01) Implement faster slash slicing + +### 1.0.2 - 07th Jun 2017 + +- [8e13ecb](https://github.com/darsain/remove-trailing-separator/commit/8e13ecbfd7b9f5fdf97c5d5ff923e4718b874e31) ES5 compatibility + +### 1.0.1 - 25th Sep 2016 + +- [b78606d](https://github.com/darsain/remove-trailing-separator/commit/af90b4e153a4527894741af6c7005acaeb78606d) Remove backslash only on win32 systems + +### 1.0.0 - 24th Sep 2016 + +Initial release. diff --git a/node_modules/remove-trailing-separator/index.js b/node_modules/remove-trailing-separator/index.js new file mode 100644 index 00000000..512306b8 --- /dev/null +++ b/node_modules/remove-trailing-separator/index.js @@ -0,0 +1,17 @@ +var isWin = process.platform === 'win32'; + +module.exports = function (str) { + var i = str.length - 1; + if (i < 2) { + return str; + } + while (isSeparator(str, i)) { + i--; + } + return str.substr(0, i + 1); +}; + +function isSeparator(str, i) { + var char = str[i]; + return i > 0 && (char === '/' || (isWin && char === '\\')); +} diff --git a/node_modules/remove-trailing-separator/license b/node_modules/remove-trailing-separator/license new file mode 100644 index 00000000..a169aff7 --- /dev/null +++ b/node_modules/remove-trailing-separator/license @@ -0,0 +1,3 @@ +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/remove-trailing-separator/package.json b/node_modules/remove-trailing-separator/package.json new file mode 100644 index 00000000..47ef27a2 --- /dev/null +++ b/node_modules/remove-trailing-separator/package.json @@ -0,0 +1,37 @@ +{ + "name": "remove-trailing-separator", + "version": "1.1.0", + "description": "Removes separators from the end of the string.", + "main": "index.js", + "files": [ + "index.js" + ], + "scripts": { + "lint": "xo", + "pretest": "npm run lint", + "test": "nyc ava", + "report": "nyc report --reporter=html" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/darsain/remove-trailing-separator.git" + }, + "keywords": [ + "remove", + "strip", + "trailing", + "separator" + ], + "author": "darsain", + "license": "ISC", + "bugs": { + "url": "https://github.com/darsain/remove-trailing-separator/issues" + }, + "homepage": "https://github.com/darsain/remove-trailing-separator#readme", + "devDependencies": { + "ava": "^0.16.0", + "coveralls": "^2.11.14", + "nyc": "^8.3.0", + "xo": "^0.16.0" + } +} diff --git a/node_modules/remove-trailing-separator/readme.md b/node_modules/remove-trailing-separator/readme.md new file mode 100644 index 00000000..747086af --- /dev/null +++ b/node_modules/remove-trailing-separator/readme.md @@ -0,0 +1,51 @@ +# remove-trailing-separator + +[![NPM version][npm-img]][npm-url] [![Build Status: Linux][travis-img]][travis-url] [![Build Status: Windows][appveyor-img]][appveyor-url] [![Coverage Status][coveralls-img]][coveralls-url] + +Removes all separators from the end of a string. + +## Install + +``` +npm install remove-trailing-separator +``` + +## Examples + +```js +const removeTrailingSeparator = require('remove-trailing-separator'); + +removeTrailingSeparator('/foo/bar/') // '/foo/bar' +removeTrailingSeparator('/foo/bar///') // '/foo/bar' + +// leaves only/last separator +removeTrailingSeparator('/') // '/' +removeTrailingSeparator('///') // '/' + +// returns empty string +removeTrailingSeparator('') // '' +``` + +## Notable backslash, or win32 separator behavior + +`\` is considered a separator only on WIN32 systems. All POSIX compliant systems +see backslash as a valid file name character, so it would break POSIX compliance +to remove it there. + +In practice, this means that this code will return different things depending on +what system it runs on: + +```js +removeTrailingSeparator('\\foo\\') +// UNIX => '\\foo\\' +// WIN32 => '\\foo' +``` + +[npm-url]: https://npmjs.org/package/remove-trailing-separator +[npm-img]: https://badge.fury.io/js/remove-trailing-separator.svg +[travis-url]: https://travis-ci.org/darsain/remove-trailing-separator +[travis-img]: https://travis-ci.org/darsain/remove-trailing-separator.svg?branch=master +[appveyor-url]: https://ci.appveyor.com/project/darsain/remove-trailing-separator/branch/master +[appveyor-img]: https://ci.appveyor.com/api/projects/status/wvg9a93rrq95n2xl/branch/master?svg=true +[coveralls-url]: https://coveralls.io/github/darsain/remove-trailing-separator?branch=master +[coveralls-img]: https://coveralls.io/repos/github/darsain/remove-trailing-separator/badge.svg?branch=master diff --git a/node_modules/repeat-element/LICENSE b/node_modules/repeat-element/LICENSE new file mode 100644 index 00000000..7cccaf9e --- /dev/null +++ b/node_modules/repeat-element/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-present, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/repeat-element/README.md b/node_modules/repeat-element/README.md new file mode 100644 index 00000000..6006418b --- /dev/null +++ b/node_modules/repeat-element/README.md @@ -0,0 +1,99 @@ +# repeat-element [![NPM version](https://img.shields.io/npm/v/repeat-element.svg?style=flat)](https://www.npmjs.com/package/repeat-element) [![NPM monthly downloads](https://img.shields.io/npm/dm/repeat-element.svg?style=flat)](https://npmjs.org/package/repeat-element) [![NPM total downloads](https://img.shields.io/npm/dt/repeat-element.svg?style=flat)](https://npmjs.org/package/repeat-element) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/repeat-element.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/repeat-element) + +> Create an array by repeating the given value n times. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save repeat-element +``` + +## Usage + +```js +const repeat = require('repeat-element'); + +repeat('a', 5); +//=> ['a', 'a', 'a', 'a', 'a'] + +repeat('a', 1); +//=> ['a'] + +repeat('a', 0); +//=> [] + +repeat(null, 5) +//» [ null, null, null, null, null ] + +repeat({some: 'object'}, 5) +//» [ { some: 'object' }, +// { some: 'object' }, +// { some: 'object' }, +// { some: 'object' }, +// { some: 'object' } ] + +repeat(5, 5) +//» [ 5, 5, 5, 5, 5 ] +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 17 | [jonschlinkert](https://github.com/jonschlinkert) | +| 3 | [LinusU](https://github.com/LinusU) | +| 1 | [architectcodes](https://github.com/architectcodes) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on August 19, 2018._ \ No newline at end of file diff --git a/node_modules/repeat-element/index.js b/node_modules/repeat-element/index.js new file mode 100644 index 00000000..06a8d01d --- /dev/null +++ b/node_modules/repeat-element/index.js @@ -0,0 +1,18 @@ +/*! + * repeat-element + * + * Copyright (c) 2015-present, Jon Schlinkert. + * Licensed under the MIT license. + */ + +'use strict'; + +module.exports = function repeat(ele, num) { + var arr = new Array(num); + + for (var i = 0; i < num; i++) { + arr[i] = ele; + } + + return arr; +}; diff --git a/node_modules/repeat-element/package.json b/node_modules/repeat-element/package.json new file mode 100644 index 00000000..7f12ef98 --- /dev/null +++ b/node_modules/repeat-element/package.json @@ -0,0 +1,49 @@ +{ + "name": "repeat-element", + "description": "Create an array by repeating the given value n times.", + "version": "1.1.3", + "homepage": "https://github.com/jonschlinkert/repeat-element", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/repeat-element", + "bugs": { + "url": "https://github.com/jonschlinkert/repeat-element/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "benchmarked": "^2.0.0", + "chalk": "^2.4.1", + "glob": "^7.1.2", + "gulp-format-md": "^1.0.0", + "minimist": "^1.2.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "array", + "element", + "repeat", + "string" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/repeat-string/LICENSE b/node_modules/repeat-string/LICENSE new file mode 100644 index 00000000..39245ac1 --- /dev/null +++ b/node_modules/repeat-string/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/repeat-string/README.md b/node_modules/repeat-string/README.md new file mode 100644 index 00000000..aaa5e91c --- /dev/null +++ b/node_modules/repeat-string/README.md @@ -0,0 +1,136 @@ +# repeat-string [![NPM version](https://img.shields.io/npm/v/repeat-string.svg?style=flat)](https://www.npmjs.com/package/repeat-string) [![NPM monthly downloads](https://img.shields.io/npm/dm/repeat-string.svg?style=flat)](https://npmjs.org/package/repeat-string) [![NPM total downloads](https://img.shields.io/npm/dt/repeat-string.svg?style=flat)](https://npmjs.org/package/repeat-string) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/repeat-string.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/repeat-string) + +> Repeat the given string n times. Fastest implementation for repeating a string. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save repeat-string +``` + +## Usage + +### [repeat](index.js#L41) + +Repeat the given `string` the specified `number` of times. + +**Example:** + +**Example** + +```js +var repeat = require('repeat-string'); +repeat('A', 5); +//=> AAAAA +``` + +**Params** + +* `string` **{String}**: The string to repeat +* `number` **{Number}**: The number of times to repeat the string +* `returns` **{String}**: Repeated string + +## Benchmarks + +Repeat string is significantly faster than the native method (which is itself faster than [repeating](https://github.com/sindresorhus/repeating)): + +```sh +# 2x +repeat-string █████████████████████████ (26,953,977 ops/sec) +repeating █████████ (9,855,695 ops/sec) +native ██████████████████ (19,453,895 ops/sec) + +# 3x +repeat-string █████████████████████████ (19,445,252 ops/sec) +repeating ███████████ (8,661,565 ops/sec) +native ████████████████████ (16,020,598 ops/sec) + +# 10x +repeat-string █████████████████████████ (23,792,521 ops/sec) +repeating █████████ (8,571,332 ops/sec) +native ███████████████ (14,582,955 ops/sec) + +# 50x +repeat-string █████████████████████████ (23,640,179 ops/sec) +repeating █████ (5,505,509 ops/sec) +native ██████████ (10,085,557 ops/sec) + +# 250x +repeat-string █████████████████████████ (23,489,618 ops/sec) +repeating ████ (3,962,937 ops/sec) +native ████████ (7,724,892 ops/sec) + +# 2000x +repeat-string █████████████████████████ (20,315,172 ops/sec) +repeating ████ (3,297,079 ops/sec) +native ███████ (6,203,331 ops/sec) + +# 20000x +repeat-string █████████████████████████ (23,382,915 ops/sec) +repeating ███ (2,980,058 ops/sec) +native █████ (5,578,808 ops/sec) +``` + +**Run the benchmarks** + +Install dev dependencies: + +```sh +npm i -d && node benchmark +``` + +## About + +### Related projects + +[repeat-element](https://www.npmjs.com/package/repeat-element): Create an array by repeating the given value n times. | [homepage](https://github.com/jonschlinkert/repeat-element "Create an array by repeating the given value n times.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Contributors + +| **Commits** | **Contributor**
| +| --- | --- | +| 51 | [jonschlinkert](https://github.com/jonschlinkert) | +| 2 | [LinusU](https://github.com/LinusU) | +| 2 | [tbusser](https://github.com/tbusser) | +| 1 | [doowb](https://github.com/doowb) | +| 1 | [wooorm](https://github.com/wooorm) | + +### Building docs + +_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ + +To generate the readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install -g verb verb-generate-readme && verb +``` + +### Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +### License + +Copyright © 2016, [Jon Schlinkert](http://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/repeat-string/blob/master/LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.2.0, on October 23, 2016._ \ No newline at end of file diff --git a/node_modules/repeat-string/index.js b/node_modules/repeat-string/index.js new file mode 100644 index 00000000..4459afd8 --- /dev/null +++ b/node_modules/repeat-string/index.js @@ -0,0 +1,70 @@ +/*! + * repeat-string + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +/** + * Results cache + */ + +var res = ''; +var cache; + +/** + * Expose `repeat` + */ + +module.exports = repeat; + +/** + * Repeat the given `string` the specified `number` + * of times. + * + * **Example:** + * + * ```js + * var repeat = require('repeat-string'); + * repeat('A', 5); + * //=> AAAAA + * ``` + * + * @param {String} `string` The string to repeat + * @param {Number} `number` The number of times to repeat the string + * @return {String} Repeated string + * @api public + */ + +function repeat(str, num) { + if (typeof str !== 'string') { + throw new TypeError('expected a string'); + } + + // cover common, quick use cases + if (num === 1) return str; + if (num === 2) return str + str; + + var max = str.length * num; + if (cache !== str || typeof cache === 'undefined') { + cache = str; + res = ''; + } else if (res.length >= max) { + return res.substr(0, max); + } + + while (max > res.length && num > 1) { + if (num & 1) { + res += str; + } + + num >>= 1; + str += str; + } + + res += str; + res = res.substr(0, max); + return res; +} diff --git a/node_modules/repeat-string/package.json b/node_modules/repeat-string/package.json new file mode 100644 index 00000000..09f88929 --- /dev/null +++ b/node_modules/repeat-string/package.json @@ -0,0 +1,77 @@ +{ + "name": "repeat-string", + "description": "Repeat the given string n times. 
Fastest implementation for repeating a string.", + "version": "1.6.1", + "homepage": "https://github.com/jonschlinkert/repeat-string", + "author": "Jon Schlinkert (http://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://github.com/doowb)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Linus Unnebäck (http://linus.unnebäck.se)", + "Thijs Busser (http://tbusser.net)", + "Titus (wooorm.com)" + ], + "repository": "jonschlinkert/repeat-string", + "bugs": { + "url": "https://github.com/jonschlinkert/repeat-string/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "ansi-cyan": "^0.1.1", + "benchmarked": "^0.2.5", + "gulp-format-md": "^0.1.11", + "isobject": "^2.1.0", + "mocha": "^3.1.2", + "repeating": "^3.0.0", + "text-table": "^0.2.0", + "yargs-parser": "^4.0.2" + }, + "keywords": [ + "fast", + "fastest", + "fill", + "left", + "left-pad", + "multiple", + "pad", + "padding", + "repeat", + "repeating", + "repetition", + "right", + "right-pad", + "string", + "times" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "repeat-element" + ] + }, + "helpers": [ + "./benchmark/helper.js" + ], + "reflinks": [ + "verb" + ] + } +} diff --git a/node_modules/resolve-url/.jshintrc b/node_modules/resolve-url/.jshintrc new file mode 100644 index 00000000..aaf33580 --- /dev/null +++ b/node_modules/resolve-url/.jshintrc @@ -0,0 +1,44 @@ +{ + "bitwise": true, + "camelcase": true, + "curly": false, + "eqeqeq": true, + "es3": true, + "forin": true, + "immed": false, + "indent": false, + "latedef": "nofunc", + "newcap": false, + "noarg": true, + "noempty": true, + "nonew": false, + "plusplus": false, + "quotmark": false, + "undef": true, + "unused": "vars", + "strict": false, + "trailing": true, + "maxparams": 5, + "maxdepth": false, + "maxstatements": false, + "maxcomplexity": false, + "maxlen": 100, + + "asi": true, + "expr": true, + "globalstrict": true, + "smarttabs": true, + "sub": true, + + "node": true, + "browser": true, + "globals": { + "describe": false, + "it": false, + "before": false, + "beforeEach": false, + "after": false, + "afterEach": false, + "define": false + } +} diff --git a/node_modules/resolve-url/LICENSE b/node_modules/resolve-url/LICENSE new file mode 100644 index 00000000..0595be36 --- /dev/null +++ b/node_modules/resolve-url/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013 Simon Lydell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/resolve-url/bower.json b/node_modules/resolve-url/bower.json new file mode 100644 index 00000000..31aa6f4e --- /dev/null +++ b/node_modules/resolve-url/bower.json @@ -0,0 +1,15 @@ +{ + "name": "resolve-url", + "version": "0.2.1", + "description": "Like Node.js’ `path.resolve`/`url.resolve` for the browser.", + "authors": ["Simon Lydell"], + "license": "MIT", + "main": "resolve-url.js", + "keywords": [ + "resolve", + "url" + ], + "ignore": [ + ".*" + ] +} diff --git a/node_modules/resolve-url/changelog.md b/node_modules/resolve-url/changelog.md new file mode 100644 index 00000000..2a4a6304 --- /dev/null +++ b/node_modules/resolve-url/changelog.md @@ -0,0 +1,15 @@ +### Version 0.2.1 (2014-02-25) ### + +- Fix edge case when (accidentally) supplying only one argument, and that + argument happens to be a falsy value such as `undefined` or `null`. + + +### Version 0.2.0 (2014-02-24) ### + +- Disallow passing 0 arguments. It’s weird and inconsistent between browsers. + (Backwards incompatible change.) + + +### Version 0.1.0 (2014-02-23) ### + +- Initial release. diff --git a/node_modules/resolve-url/component.json b/node_modules/resolve-url/component.json new file mode 100644 index 00000000..f37cf005 --- /dev/null +++ b/node_modules/resolve-url/component.json @@ -0,0 +1,15 @@ +{ + "name": "resolve-url", + "version": "0.2.1", + "license": "MIT", + "description": "Like Node.js’ `path.resolve`/`url.resolve` for the browser.", + "main": "resolve-url.js", + "repo": "lydell/resolve-url", + "keywords": [ + "resolve", + "url" + ], + "scripts": [ + "resolve-url.js" + ] +} diff --git a/node_modules/resolve-url/package.json b/node_modules/resolve-url/package.json new file mode 100644 index 00000000..94423663 --- /dev/null +++ b/node_modules/resolve-url/package.json @@ -0,0 +1,34 @@ +{ + "name": "resolve-url", + "version": "0.2.1", + "description": "Like Node.js’ `path.resolve`/`url.resolve` for the browser.", + "author": "Simon Lydell", + "license": "MIT", + "main": "resolve-url.js", + "repository": "lydell/resolve-url", + "keywords": [ + "resolve", + "url" + ], + "scripts": { + "test": "jshint resolve-url.js test/ && testling -u" + }, + "devDependencies": { + "testling": "~1.6.0", + "jshint": "~2.4.3", + "tape": "~2.5.0" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "chrome/latest", + "firefox/latest", + "opera/12", + "opera/latest", + "safari/5", + "iphone/6", + "android-browser/4" + ] + } +} diff --git a/node_modules/resolve-url/readme.md b/node_modules/resolve-url/readme.md new file mode 100644 index 00000000..edfff735 --- /dev/null +++ b/node_modules/resolve-url/readme.md @@ -0,0 +1,83 @@ +Overview +======== + +[![browser support](https://ci.testling.com/lydell/resolve-url.png)](https://ci.testling.com/lydell/resolve-url) + +Like Node.js’ [`path.resolve`]/[`url.resolve`] for the browser. 
+ +```js +var resolveUrl = require("resolve-url") + +window.location +// https://example.com/articles/resolving-urls/edit + +resolveUrl("remove") +// https://example.com/articles/resolving-urls/remove + +resolveUrl("/static/scripts/app.js") +// https://example.com/static/scripts/app.js + +// Imagine /static/scripts/app.js contains `//# sourceMappingURL=../source-maps/app.js.map` +resolveUrl("/static/scripts/app.js", "../source-maps/app.js.map") +// https://example.com/static/source-maps/app.js.map + +resolveUrl("/static/scripts/app.js", "../source-maps/app.js.map", "../coffee/app.coffee") +// https://example.com/static/coffee/app.coffee + +resolveUrl("//cdn.example.com/jquery.js") +// https://cdn.example.com/jquery.js + +resolveUrl("http://foo.org/") +// http://foo.org/ +``` + + +Installation +============ + +- `npm install resolve-url` +- `bower install resolve-url` +- `component install lydell/resolve-url` + +Works with CommonJS, AMD and browser globals, through UMD. + + +Usage +===== + +### `resolveUrl(...urls)` ### + +Pass one or more urls. Resolves the last one to an absolute url, using the +previous ones and `window.location`. + +It’s like starting out on `window.location`, and then clicking links with the +urls as `href` attributes in order, from left to right. + +Unlike Node.js’ [`path.resolve`], this function always goes through all of the +arguments, from left to right. `path.resolve` goes from right to left and only +in the worst case goes through them all. Should that matter. + +Actually, the function is _really_ like clicking a lot of links in series: An +actual `` gets its `href` attribute set for each url! This means that the +url resolution of the browser is used, which makes this module really +light-weight. + +Also note that this functions deals with urls, not paths, so in that respect it +has more in common with Node.js’ [`url.resolve`]. But the arguments are more +like [`path.resolve`]. + +[`path.resolve`]: http://nodejs.org/api/path.html#path_path_resolve_from_to +[`url.resolve`]: http://nodejs.org/api/url.html#url_url_resolve_from_to + + +Tests +===== + +Run `npm test`, which lints the code and then gives you a link to open in a +browser of choice (using `testling`). + + +License +======= + +[The X11 (“MIT”) License](LICENSE). diff --git a/node_modules/resolve-url/resolve-url.js b/node_modules/resolve-url/resolve-url.js new file mode 100644 index 00000000..19e8d040 --- /dev/null +++ b/node_modules/resolve-url/resolve-url.js @@ -0,0 +1,47 @@ +// Copyright 2014 Simon Lydell +// X11 (“MIT”) Licensed. (See LICENSE.) 
+ +void (function(root, factory) { + if (typeof define === "function" && define.amd) { + define(factory) + } else if (typeof exports === "object") { + module.exports = factory() + } else { + root.resolveUrl = factory() + } +}(this, function() { + + function resolveUrl(/* ...urls */) { + var numUrls = arguments.length + + if (numUrls === 0) { + throw new Error("resolveUrl requires at least one argument; got none.") + } + + var base = document.createElement("base") + base.href = arguments[0] + + if (numUrls === 1) { + return base.href + } + + var head = document.getElementsByTagName("head")[0] + head.insertBefore(base, head.firstChild) + + var a = document.createElement("a") + var resolved + + for (var index = 1; index < numUrls; index++) { + a.href = arguments[index] + resolved = a.href + base.href = resolved + } + + head.removeChild(base) + + return resolved + } + + return resolveUrl + +})); diff --git a/node_modules/resolve-url/test/resolve-url.js b/node_modules/resolve-url/test/resolve-url.js new file mode 100644 index 00000000..18532edd --- /dev/null +++ b/node_modules/resolve-url/test/resolve-url.js @@ -0,0 +1,70 @@ +// Copyright 2014 Simon Lydell +// X11 (“MIT”) Licensed. (See LICENSE.) + +var test = require("tape") + +var resolveUrl = require("../") + +"use strict" + +test("resolveUrl", function(t) { + + t.plan(7) + + t.equal(typeof resolveUrl, "function", "is a function") + + t.equal( + resolveUrl("https://example.com/"), + "https://example.com/" + ) + + var loc = "https://example.com/articles/resolving-urls/edit" + + t.equal( + resolveUrl(loc, "remove"), + "https://example.com/articles/resolving-urls/remove" + ) + + t.equal( + resolveUrl(loc, "/static/scripts/app.js"), + "https://example.com/static/scripts/app.js" + ) + + t.equal( + resolveUrl(loc, "/static/scripts/app.js", "../source-maps/app.js.map"), + "https://example.com/static/source-maps/app.js.map" + ) + + t.equal( + resolveUrl(loc, "/static/scripts/app.js", "../source-maps/app.js.map", "../coffee/app.coffee"), + "https://example.com/static/coffee/app.coffee" + ) + + t.equal( + resolveUrl(loc, "//cdn.example.com/jquery.js"), + "https://cdn.example.com/jquery.js" + ) + +}) + +test("edge cases", function(t) { + + t.plan(4) + + t["throws"](resolveUrl, /at least one argument/, "throws with no arguments") + + var accidentallyUndefined + var result + t.doesNotThrow( + function() { result = resolveUrl(accidentallyUndefined) }, + "undefined is still an argument" + ) + t.ok(result.match(/\/undefined$/), "undefined is stringified") + + t.equal( + resolveUrl("http://foo.org/test", undefined, {}, ["a/b"], null), + "http://foo.org/a/null", + "arguments are stringified" + ) + +}) diff --git a/node_modules/ret/LICENSE b/node_modules/ret/LICENSE new file mode 100644 index 00000000..b351ee8e --- /dev/null +++ b/node_modules/ret/LICENSE @@ -0,0 +1,19 @@ +Copyright (C) 2011 by Roly Fentanes + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/ret/README.md b/node_modules/ret/README.md new file mode 100644 index 00000000..28563e16 --- /dev/null +++ b/node_modules/ret/README.md @@ -0,0 +1,183 @@ +# Regular Expression Tokenizer + +Tokenizes strings that represent a regular expressions. + +[![Build Status](https://secure.travis-ci.org/fent/ret.js.svg)](http://travis-ci.org/fent/ret.js) +[![Dependency Status](https://david-dm.org/fent/ret.js.svg)](https://david-dm.org/fent/ret.js) +[![codecov](https://codecov.io/gh/fent/ret.js/branch/master/graph/badge.svg)](https://codecov.io/gh/fent/ret.js) + +# Usage + +```js +var ret = require('ret'); + +var tokens = ret(/foo|bar/.source); +``` + +`tokens` will contain the following object + +```js +{ + "type": ret.types.ROOT + "options": [ + [ { "type": ret.types.CHAR, "value", 102 }, + { "type": ret.types.CHAR, "value", 111 }, + { "type": ret.types.CHAR, "value", 111 } ], + [ { "type": ret.types.CHAR, "value", 98 }, + { "type": ret.types.CHAR, "value", 97 }, + { "type": ret.types.CHAR, "value", 114 } ] + ] +} +``` + +# Token Types + +`ret.types` is a collection of the various token types exported by ret. + +### ROOT + +Only used in the root of the regexp. This is needed due to the posibility of the root containing a pipe `|` character. In that case, the token will have an `options` key that will be an array of arrays of tokens. If not, it will contain a `stack` key that is an array of tokens. + +```js +{ + "type": ret.types.ROOT, + "stack": [token1, token2...], +} +``` + +```js +{ + "type": ret.types.ROOT, + "options" [ + [token1, token2...], + [othertoken1, othertoken2...] + ... + ], +} +``` + +### GROUP + +Groups contain tokens that are inside of a parenthesis. If the group begins with `?` followed by another character, it's a special type of group. A ':' tells the group not to be remembered when `exec` is used. '=' means the previous token matches only if followed by this group, and '!' means the previous token matches only if NOT followed. + +Like root, it can contain an `options` key instead of `stack` if there is a pipe. + +```js +{ + "type": ret.types.GROUP, + "remember" true, + "followedBy": false, + "notFollowedBy": false, + "stack": [token1, token2...], +} +``` + +```js +{ + "type": ret.types.GROUP, + "remember" true, + "followedBy": false, + "notFollowedBy": false, + "options" [ + [token1, token2...], + [othertoken1, othertoken2...] + ... + ], +} +``` + +### POSITION + +`\b`, `\B`, `^`, and `$` specify positions in the regexp. + +```js +{ + "type": ret.types.POSITION, + "value": "^", +} +``` + +### SET + +Contains a key `set` specifying what tokens are allowed and a key `not` specifying if the set should be negated. A set can contain other sets, ranges, and characters. + +```js +{ + "type": ret.types.SET, + "set": [token1, token2...], + "not": false, +} +``` + +### RANGE + +Used in set tokens to specify a character range. `from` and `to` are character codes. 
+ +```js +{ + "type": ret.types.RANGE, + "from": 97, + "to": 122, +} +``` + +### REPETITION + +```js +{ + "type": ret.types.REPETITION, + "min": 0, + "max": Infinity, + "value": token, +} +``` + +### REFERENCE + +References a group token. `value` is 1-9. + +```js +{ + "type": ret.types.REFERENCE, + "value": 1, +} +``` + +### CHAR + +Represents a single character token. `value` is the character code. This might seem a bit cluttering instead of concatenating characters together. But since repetition tokens only repeat the last token and not the last clause like the pipe, it's simpler to do it this way. + +```js +{ + "type": ret.types.CHAR, + "value": 123, +} +``` + +## Errors + +ret.js will throw errors if given a string with an invalid regular expression. All possible errors are + +* Invalid group. When a group with an immediate `?` character is followed by an invalid character. It can only be followed by `!`, `=`, or `:`. Example: `/(?_abc)/` +* Nothing to repeat. Thrown when a repetitional token is used as the first token in the current clause, as in right in the beginning of the regexp or group, or right after a pipe. Example: `/foo|?bar/`, `/{1,3}foo|bar/`, `/foo(+bar)/` +* Unmatched ). A group was not opened, but was closed. Example: `/hello)2u/` +* Unterminated group. A group was not closed. Example: `/(1(23)4/` +* Unterminated character class. A custom character set was not closed. Example: `/[abc/` + + +# Install + + npm install ret + + +# Tests + +Tests are written with [vows](http://vowsjs.org/) + +```bash +npm test +``` + +# License + +MIT diff --git a/node_modules/ret/lib/index.js b/node_modules/ret/lib/index.js new file mode 100644 index 00000000..0e151c39 --- /dev/null +++ b/node_modules/ret/lib/index.js @@ -0,0 +1,282 @@ +var util = require('./util'); +var types = require('./types'); +var sets = require('./sets'); +var positions = require('./positions'); + + +module.exports = function(regexpStr) { + var i = 0, l, c, + start = { type: types.ROOT, stack: []}, + + // Keep track of last clause/group and stack. + lastGroup = start, + last = start.stack, + groupStack = []; + + + var repeatErr = function(i) { + util.error(regexpStr, 'Nothing to repeat at column ' + (i - 1)); + }; + + // Decode a few escaped characters. + var str = util.strToChars(regexpStr); + l = str.length; + + // Iterate through each character in string. + while (i < l) { + c = str[i++]; + + switch (c) { + // Handle escaped characters, inclues a few sets. + case '\\': + c = str[i++]; + + switch (c) { + case 'b': + last.push(positions.wordBoundary()); + break; + + case 'B': + last.push(positions.nonWordBoundary()); + break; + + case 'w': + last.push(sets.words()); + break; + + case 'W': + last.push(sets.notWords()); + break; + + case 'd': + last.push(sets.ints()); + break; + + case 'D': + last.push(sets.notInts()); + break; + + case 's': + last.push(sets.whitespace()); + break; + + case 'S': + last.push(sets.notWhitespace()); + break; + + default: + // Check if c is integer. + // In which case it's a reference. + if (/\d/.test(c)) { + last.push({ type: types.REFERENCE, value: parseInt(c, 10) }); + + // Escaped character. + } else { + last.push({ type: types.CHAR, value: c.charCodeAt(0) }); + } + } + + break; + + + // Positionals. + case '^': + last.push(positions.begin()); + break; + + case '$': + last.push(positions.end()); + break; + + + // Handle custom sets. + case '[': + // Check if this class is 'anti' i.e. [^abc]. 
+ var not; + if (str[i] === '^') { + not = true; + i++; + } else { + not = false; + } + + // Get all the characters in class. + var classTokens = util.tokenizeClass(str.slice(i), regexpStr); + + // Increase index by length of class. + i += classTokens[1]; + last.push({ + type: types.SET, + set: classTokens[0], + not: not, + }); + + break; + + + // Class of any character except \n. + case '.': + last.push(sets.anyChar()); + break; + + + // Push group onto stack. + case '(': + // Create group. + var group = { + type: types.GROUP, + stack: [], + remember: true, + }; + + c = str[i]; + + // If if this is a special kind of group. + if (c === '?') { + c = str[i + 1]; + i += 2; + + // Match if followed by. + if (c === '=') { + group.followedBy = true; + + // Match if not followed by. + } else if (c === '!') { + group.notFollowedBy = true; + + } else if (c !== ':') { + util.error(regexpStr, + 'Invalid group, character \'' + c + + '\' after \'?\' at column ' + (i - 1)); + } + + group.remember = false; + } + + // Insert subgroup into current group stack. + last.push(group); + + // Remember the current group for when the group closes. + groupStack.push(lastGroup); + + // Make this new group the current group. + lastGroup = group; + last = group.stack; + break; + + + // Pop group out of stack. + case ')': + if (groupStack.length === 0) { + util.error(regexpStr, 'Unmatched ) at column ' + (i - 1)); + } + lastGroup = groupStack.pop(); + + // Check if this group has a PIPE. + // To get back the correct last stack. + last = lastGroup.options ? + lastGroup.options[lastGroup.options.length - 1] : lastGroup.stack; + break; + + + // Use pipe character to give more choices. + case '|': + // Create array where options are if this is the first PIPE + // in this clause. + if (!lastGroup.options) { + lastGroup.options = [lastGroup.stack]; + delete lastGroup.stack; + } + + // Create a new stack and add to options for rest of clause. + var stack = []; + lastGroup.options.push(stack); + last = stack; + break; + + + // Repetition. + // For every repetition, remove last element from last stack + // then insert back a RANGE object. + // This design is chosen because there could be more than + // one repetition symbols in a regex i.e. `a?+{2,3}`. + case '{': + var rs = /^(\d+)(,(\d+)?)?\}/.exec(str.slice(i)), min, max; + if (rs !== null) { + if (last.length === 0) { + repeatErr(i); + } + min = parseInt(rs[1], 10); + max = rs[2] ? rs[3] ? parseInt(rs[3], 10) : Infinity : min; + i += rs[0].length; + + last.push({ + type: types.REPETITION, + min: min, + max: max, + value: last.pop(), + }); + } else { + last.push({ + type: types.CHAR, + value: 123, + }); + } + break; + + case '?': + if (last.length === 0) { + repeatErr(i); + } + last.push({ + type: types.REPETITION, + min: 0, + max: 1, + value: last.pop(), + }); + break; + + case '+': + if (last.length === 0) { + repeatErr(i); + } + last.push({ + type: types.REPETITION, + min: 1, + max: Infinity, + value: last.pop(), + }); + break; + + case '*': + if (last.length === 0) { + repeatErr(i); + } + last.push({ + type: types.REPETITION, + min: 0, + max: Infinity, + value: last.pop(), + }); + break; + + + // Default is a character that is not `\[](){}?+*^$`. + default: + last.push({ + type: types.CHAR, + value: c.charCodeAt(0), + }); + } + + } + + // Check if any groups have not been closed. 
+ if (groupStack.length !== 0) { + util.error(regexpStr, 'Unterminated group'); + } + + return start; +}; + +module.exports.types = types; diff --git a/node_modules/ret/lib/positions.js b/node_modules/ret/lib/positions.js new file mode 100644 index 00000000..80677ee5 --- /dev/null +++ b/node_modules/ret/lib/positions.js @@ -0,0 +1,17 @@ +var types = require('./types'); + +exports.wordBoundary = function() { + return { type: types.POSITION, value: 'b' }; +}; + +exports.nonWordBoundary = function() { + return { type: types.POSITION, value: 'B' }; +}; + +exports.begin = function() { + return { type: types.POSITION, value: '^' }; +}; + +exports.end = function() { + return { type: types.POSITION, value: '$' }; +}; diff --git a/node_modules/ret/lib/sets.js b/node_modules/ret/lib/sets.js new file mode 100644 index 00000000..5fb6be5d --- /dev/null +++ b/node_modules/ret/lib/sets.js @@ -0,0 +1,82 @@ +var types = require('./types'); + +var INTS = function() { + return [{ type: types.RANGE , from: 48, to: 57 }]; +}; + +var WORDS = function() { + return [ + { type: types.CHAR, value: 95 }, + { type: types.RANGE, from: 97, to: 122 }, + { type: types.RANGE, from: 65, to: 90 } + ].concat(INTS()); +}; + +var WHITESPACE = function() { + return [ + { type: types.CHAR, value: 9 }, + { type: types.CHAR, value: 10 }, + { type: types.CHAR, value: 11 }, + { type: types.CHAR, value: 12 }, + { type: types.CHAR, value: 13 }, + { type: types.CHAR, value: 32 }, + { type: types.CHAR, value: 160 }, + { type: types.CHAR, value: 5760 }, + { type: types.CHAR, value: 6158 }, + { type: types.CHAR, value: 8192 }, + { type: types.CHAR, value: 8193 }, + { type: types.CHAR, value: 8194 }, + { type: types.CHAR, value: 8195 }, + { type: types.CHAR, value: 8196 }, + { type: types.CHAR, value: 8197 }, + { type: types.CHAR, value: 8198 }, + { type: types.CHAR, value: 8199 }, + { type: types.CHAR, value: 8200 }, + { type: types.CHAR, value: 8201 }, + { type: types.CHAR, value: 8202 }, + { type: types.CHAR, value: 8232 }, + { type: types.CHAR, value: 8233 }, + { type: types.CHAR, value: 8239 }, + { type: types.CHAR, value: 8287 }, + { type: types.CHAR, value: 12288 }, + { type: types.CHAR, value: 65279 } + ]; +}; + +var NOTANYCHAR = function() { + return [ + { type: types.CHAR, value: 10 }, + { type: types.CHAR, value: 13 }, + { type: types.CHAR, value: 8232 }, + { type: types.CHAR, value: 8233 }, + ]; +}; + +// Predefined class objects. 
+exports.words = function() { + return { type: types.SET, set: WORDS(), not: false }; +}; + +exports.notWords = function() { + return { type: types.SET, set: WORDS(), not: true }; +}; + +exports.ints = function() { + return { type: types.SET, set: INTS(), not: false }; +}; + +exports.notInts = function() { + return { type: types.SET, set: INTS(), not: true }; +}; + +exports.whitespace = function() { + return { type: types.SET, set: WHITESPACE(), not: false }; +}; + +exports.notWhitespace = function() { + return { type: types.SET, set: WHITESPACE(), not: true }; +}; + +exports.anyChar = function() { + return { type: types.SET, set: NOTANYCHAR(), not: true }; +}; diff --git a/node_modules/ret/lib/types.js b/node_modules/ret/lib/types.js new file mode 100644 index 00000000..94841451 --- /dev/null +++ b/node_modules/ret/lib/types.js @@ -0,0 +1,10 @@ +module.exports = { + ROOT : 0, + GROUP : 1, + POSITION : 2, + SET : 3, + RANGE : 4, + REPETITION : 5, + REFERENCE : 6, + CHAR : 7, +}; diff --git a/node_modules/ret/lib/util.js b/node_modules/ret/lib/util.js new file mode 100644 index 00000000..97d8cf51 --- /dev/null +++ b/node_modules/ret/lib/util.js @@ -0,0 +1,111 @@ +var types = require('./types'); +var sets = require('./sets'); + + +// All of these are private and only used by randexp. +// It's assumed that they will always be called with the correct input. + +var CTRL = '@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^ ?'; +var SLSH = { '0': 0, 't': 9, 'n': 10, 'v': 11, 'f': 12, 'r': 13 }; + +/** + * Finds character representations in str and convert all to + * their respective characters + * + * @param {String} str + * @return {String} + */ +exports.strToChars = function(str) { + /* jshint maxlen: false */ + var chars_regex = /(\[\\b\])|(\\)?\\(?:u([A-F0-9]{4})|x([A-F0-9]{2})|(0?[0-7]{2})|c([@A-Z\[\\\]\^?])|([0tnvfr]))/g; + str = str.replace(chars_regex, function(s, b, lbs, a16, b16, c8, dctrl, eslsh) { + if (lbs) { + return s; + } + + var code = b ? 8 : + a16 ? parseInt(a16, 16) : + b16 ? parseInt(b16, 16) : + c8 ? parseInt(c8, 8) : + dctrl ? CTRL.indexOf(dctrl) : + SLSH[eslsh]; + + var c = String.fromCharCode(code); + + // Escape special regex characters. + if (/[\[\]{}\^$.|?*+()]/.test(c)) { + c = '\\' + c; + } + + return c; + }); + + return str; +}; + + +/** + * turns class into tokens + * reads str until it encounters a ] not preceeded by a \ + * + * @param {String} str + * @param {String} regexpStr + * @return {Array., Number>} + */ +exports.tokenizeClass = function(str, regexpStr) { + /* jshint maxlen: false */ + var tokens = []; + var regexp = /\\(?:(w)|(d)|(s)|(W)|(D)|(S))|((?:(?:\\)(.)|([^\]\\]))-(?:\\)?([^\]]))|(\])|(?:\\)?(.)/g; + var rs, c; + + + while ((rs = regexp.exec(str)) != null) { + if (rs[1]) { + tokens.push(sets.words()); + + } else if (rs[2]) { + tokens.push(sets.ints()); + + } else if (rs[3]) { + tokens.push(sets.whitespace()); + + } else if (rs[4]) { + tokens.push(sets.notWords()); + + } else if (rs[5]) { + tokens.push(sets.notInts()); + + } else if (rs[6]) { + tokens.push(sets.notWhitespace()); + + } else if (rs[7]) { + tokens.push({ + type: types.RANGE, + from: (rs[8] || rs[9]).charCodeAt(0), + to: rs[10].charCodeAt(0), + }); + + } else if (c = rs[12]) { + tokens.push({ + type: types.CHAR, + value: c.charCodeAt(0), + }); + + } else { + return [tokens, regexp.lastIndex]; + } + } + + exports.error(regexpStr, 'Unterminated character class'); +}; + + +/** + * Shortcut to throw errors. 
+ * + * @param {String} regexp + * @param {String} msg + */ +exports.error = function(regexp, msg) { + throw new SyntaxError('Invalid regular expression: /' + regexp + '/: ' + msg); +}; diff --git a/node_modules/ret/package.json b/node_modules/ret/package.json new file mode 100644 index 00000000..2fde9a44 --- /dev/null +++ b/node_modules/ret/package.json @@ -0,0 +1,35 @@ +{ + "name": "ret", + "description": "Tokenizes a string that represents a regular expression.", + "keywords": [ + "regex", + "regexp", + "regular expression", + "parser", + "tokenizer" + ], + "version": "0.1.15", + "repository": { + "type": "git", + "url": "git://github.com/fent/ret.js.git" + }, + "author": "Roly Fentanes (https://github.com/fent)", + "main": "./lib/index.js", + "files": [ + "lib" + ], + "scripts": { + "test": "istanbul cover vows -- --spec test/*-test.js" + }, + "directories": { + "lib": "./lib" + }, + "devDependencies": { + "istanbul": "*", + "vows": "*" + }, + "engines": { + "node": ">=0.12" + }, + "license": "MIT" +} diff --git a/node_modules/rimraf/LICENSE b/node_modules/rimraf/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/rimraf/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/rimraf/README.md b/node_modules/rimraf/README.md new file mode 100644 index 00000000..423b8cf8 --- /dev/null +++ b/node_modules/rimraf/README.md @@ -0,0 +1,101 @@ +[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies) + +The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node. + +Install with `npm install rimraf`, or just drop rimraf.js somewhere. + +## API + +`rimraf(f, [opts], callback)` + +The first parameter will be interpreted as a globbing pattern for files. If you +want to disable globbing you can do so with `opts.disableGlob` (defaults to +`false`). This might be handy, for instance, if you have filenames that contain +globbing wildcard characters. + +The callback will be called with an error if there is one. Certain +errors are handled for you: + +* Windows: `EBUSY` and `ENOTEMPTY` - rimraf will back off a maximum of + `opts.maxBusyTries` times before giving up, adding 100ms of wait + between each attempt. The default `maxBusyTries` is 3. +* `ENOENT` - If the file doesn't exist, rimraf will return + successfully, since your desired outcome is already the case. +* `EMFILE` - Since `readdir` requires opening a file descriptor, it's + possible to hit `EMFILE` if too many file descriptors are in use. 
+  In the sync case, there's nothing to be done for this. But in the
+  async case, rimraf will gradually back off with timeouts up to
+  `opts.emfileWait` ms, which defaults to 1000.
+
+## options
+
+* unlink, chmod, stat, lstat, rmdir, readdir,
+  unlinkSync, chmodSync, statSync, lstatSync, rmdirSync, readdirSync
+
+  In order to use a custom file system library, you can override
+  specific fs functions on the options object.
+
+  If any of these functions are present on the options object, then
+  the supplied function will be used instead of the default fs
+  method.
+
+  Sync methods are only relevant for `rimraf.sync()`, of course.
+
+  For example:
+
+  ```javascript
+  var myCustomFS = require('some-custom-fs')
+
+  rimraf('some-thing', myCustomFS, callback)
+  ```
+
+* maxBusyTries
+
+  If an `EBUSY`, `ENOTEMPTY`, or `EPERM` error code is encountered
+  on Windows systems, then rimraf will retry with a linear backoff
+  wait of 100ms longer on each try. The default maxBusyTries is 3.
+
+  Only relevant for async usage.
+
+* emfileWait
+
+  If an `EMFILE` error is encountered, then rimraf will retry
+  repeatedly with a linear backoff of 1ms longer on each try, until
+  the timeout counter hits this max. The default limit is 1000.
+
+  If you repeatedly encounter `EMFILE` errors, then consider using
+  [graceful-fs](http://npm.im/graceful-fs) in your program.
+
+  Only relevant for async usage.
+
+* glob
+
+  Set to `false` to disable [glob](http://npm.im/glob) pattern
+  matching.
+
+  Set to an object to pass options to the glob module. The default
+  glob options are `{ nosort: true, silent: true }`.
+
+  Glob version 6 is used in this module.
+
+  Relevant for both sync and async usage.
+
+* disableGlob
+
+  Set to any non-falsey value to disable globbing entirely.
+  (Equivalent to setting `glob: false`.)
+
+## rimraf.sync
+
+It can remove stuff synchronously, too. But that's not so good. Use
+the async API. It's better.
+
+## CLI
+
+If installed with `npm install rimraf -g` it can be used as a global
+command `rimraf <path> [<path> ...]` which is useful for cross platform support.
+
+## mkdirp
+
+If you need to create a directory recursively, check out
+[mkdirp](https://github.com/substack/node-mkdirp).
diff --git a/node_modules/rimraf/bin.js b/node_modules/rimraf/bin.js
new file mode 100755
index 00000000..0d1e17be
--- /dev/null
+++ b/node_modules/rimraf/bin.js
@@ -0,0 +1,50 @@
+#!/usr/bin/env node
+
+var rimraf = require('./')
+
+var help = false
+var dashdash = false
+var noglob = false
+var args = process.argv.slice(2).filter(function(arg) {
+  if (dashdash)
+    return !!arg
+  else if (arg === '--')
+    dashdash = true
+  else if (arg === '--no-glob' || arg === '-G')
+    noglob = true
+  else if (arg === '--glob' || arg === '-g')
+    noglob = false
+  else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/))
+    help = true
+  else
+    return !!arg
+})
+
+if (help || args.length === 0) {
+  // If they didn't ask for help, then this is not a "success"
+  var log = help ? console.log : console.error
+  log('Usage: rimraf <path> [<path> ...]')
+  log('')
+  log('  Deletes all files and folders at "path" recursively.')
+  log('')
+  log('Options:')
+  log('')
+  log('  -h, --help     Display this usage info')
+  log('  -G, --no-glob  Do not expand glob patterns in arguments')
+  log('  -g, --glob     Expand glob patterns in arguments (default)')
+  process.exit(help ?
0 : 1) +} else + go(0) + +function go (n) { + if (n >= args.length) + return + var options = {} + if (noglob) + options = { glob: false } + rimraf(args[n], options, function (er) { + if (er) + throw er + go(n+1) + }) +} diff --git a/node_modules/rimraf/package.json b/node_modules/rimraf/package.json new file mode 100644 index 00000000..783fae92 --- /dev/null +++ b/node_modules/rimraf/package.json @@ -0,0 +1,29 @@ +{ + "name": "rimraf", + "version": "2.6.3", + "main": "rimraf.js", + "description": "A deep deletion module for node (like `rm -rf`)", + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": "git://github.com/isaacs/rimraf.git", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags", + "test": "tap test/*.js" + }, + "bin": "./bin.js", + "dependencies": { + "glob": "^7.1.3" + }, + "files": [ + "LICENSE", + "README.md", + "bin.js", + "rimraf.js" + ], + "devDependencies": { + "mkdirp": "^0.5.1", + "tap": "^12.1.1" + } +} diff --git a/node_modules/rimraf/rimraf.js b/node_modules/rimraf/rimraf.js new file mode 100644 index 00000000..e80dd106 --- /dev/null +++ b/node_modules/rimraf/rimraf.js @@ -0,0 +1,364 @@ +module.exports = rimraf +rimraf.sync = rimrafSync + +var assert = require("assert") +var path = require("path") +var fs = require("fs") +var glob = require("glob") +var _0666 = parseInt('666', 8) + +var defaultGlobOpts = { + nosort: true, + silent: true +} + +// for EMFILE handling +var timeout = 0 + +var isWindows = (process.platform === "win32") + +function defaults (options) { + var methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(function(m) { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) + + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts +} + +function rimraf (p, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + defaults(options) + + var busyTries = 0 + var errState = null + var n = 0 + + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) + + options.lstat(p, function (er, stat) { + if (!er) + return afterGlob(null, [p]) + + glob(p, options.glob, afterGlob) + }) + + function next (er) { + errState = errState || er + if (--n === 0) + cb(errState) + } + + function afterGlob (er, results) { + if (er) + return cb(er) + + n = results.length + if (n === 0) + return cb() + + results.forEach(function (p) { + rimraf_(p, options, function CB (er) { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + var time = busyTries * 100 + // try again, with the same exact callback as this one. + return setTimeout(function () { + rimraf_(p, options, CB) + }, time) + } + + // this one won't happen if graceful-fs is used. 
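+        // EMFILE means the process ran out of file descriptors; retry with a
+        // delay that grows by 1ms per attempt, and stop retrying once the
+        // counter reaches options.emfileWait (default 1000).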
+ if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(function () { + rimraf_(p, options, CB) + }, timeout ++) + } + + // already gone + if (er.code === "ENOENT") er = null + } + + timeout = 0 + next(er) + }) + }) + } +} + +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +function rimraf_ (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, function (er, st) { + if (er && er.code === "ENOENT") + return cb(null) + + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) + + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) + + options.unlink(p, function (er) { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) +} + +function fixWinEPERM (p, options, er, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + if (er) + assert(er instanceof Error) + + options.chmod(p, _0666, function (er2) { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, function(er3, stats) { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) +} + +function fixWinEPERMSync (p, options, er) { + assert(p) + assert(options) + if (er) + assert(er instanceof Error) + + try { + options.chmodSync(p, _0666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er + } + + try { + var stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } + + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) +} + +function rmdir (p, options, originalEr, cb) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. 
+ options.rmdir(p, function (er) { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) +} + +function rmkids(p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.readdir(p, function (er, files) { + if (er) + return cb(er) + var n = files.length + if (n === 0) + return options.rmdir(p, cb) + var errState + files.forEach(function (f) { + rimraf(path.join(p, f), options, function (er) { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +function rimrafSync (p, options) { + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + var results + + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } + } + + if (!results.length) + return + + for (var i = 0; i < results.length; i++) { + var p = results[i] + + try { + var st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } + + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + + rmdirSync(p, options, er) + } + } +} + +function rmdirSync (p, options, originalEr) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } +} + +function rmkidsSync (p, options) { + assert(p) + assert(options) + options.readdirSync(p).forEach(function (f) { + rimrafSync(path.join(p, f), options) + }) + + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + var retries = isWindows ? 
100 : 1 + var i = 0 + do { + var threw = true + try { + var ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) +} diff --git a/node_modules/safe-buffer/LICENSE b/node_modules/safe-buffer/LICENSE new file mode 100644 index 00000000..0c068cee --- /dev/null +++ b/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/safe-buffer/README.md b/node_modules/safe-buffer/README.md new file mode 100644 index 00000000..e9a81afd --- /dev/null +++ b/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. 
You can use it by adding one `require` line to
+the top of your node.js modules:
+
+```js
+var Buffer = require('safe-buffer').Buffer
+
+// Existing buffer code will continue to work without issues:
+
+new Buffer('hey', 'utf8')
+new Buffer([1, 2, 3], 'utf8')
+new Buffer(obj)
+new Buffer(16) // create an uninitialized buffer (potentially unsafe)
+
+// But you can use these new explicit APIs to make clear what you want:
+
+Buffer.from('hey', 'utf8') // convert from many types to a Buffer
+Buffer.alloc(16) // create a zero-filled buffer (safe)
+Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe)
+```
+
+## api
+
+### Class Method: Buffer.from(array)
+
+* `array` {Array}
+
+Allocates a new `Buffer` using an `array` of octets.
+
+```js
+const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]);
+  // creates a new Buffer containing ASCII bytes
+  // ['b','u','f','f','e','r']
+```
+
+A `TypeError` will be thrown if `array` is not an `Array`.
+
+### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]])
+
+* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or
+  a `new ArrayBuffer()`
+* `byteOffset` {Number} Default: `0`
+* `length` {Number} Default: `arrayBuffer.length - byteOffset`
+
+When passed a reference to the `.buffer` property of a `TypedArray` instance,
+the newly created `Buffer` will share the same allocated memory as the
+TypedArray.
+
+```js
+const arr = new Uint16Array(2);
+arr[0] = 5000;
+arr[1] = 4000;
+
+const buf = Buffer.from(arr.buffer); // shares the memory with arr;
+
+console.log(buf);
+  // Prints: <Buffer 88 13 a0 0f>
+
+// changing the TypedArray changes the Buffer also
+arr[1] = 6000;
+
+console.log(buf);
+  // Prints: <Buffer 88 13 70 17>
+```
+
+The optional `byteOffset` and `length` arguments specify a memory range within
+the `arrayBuffer` that will be shared by the `Buffer`.
+
+```js
+const ab = new ArrayBuffer(10);
+const buf = Buffer.from(ab, 0, 2);
+console.log(buf.length);
+  // Prints: 2
+```
+
+A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`.
+
+### Class Method: Buffer.from(buffer)
+
+* `buffer` {Buffer}
+
+Copies the passed `buffer` data onto a new `Buffer` instance.
+
+```js
+const buf1 = Buffer.from('buffer');
+const buf2 = Buffer.from(buf1);
+
+buf1[0] = 0x61;
+console.log(buf1.toString());
+  // 'auffer'
+console.log(buf2.toString());
+  // 'buffer' (copy is not changed)
+```
+
+A `TypeError` will be thrown if `buffer` is not a `Buffer`.
+
+### Class Method: Buffer.from(str[, encoding])
+
+* `str` {String} String to encode.
+* `encoding` {String} Encoding to use, Default: `'utf8'`
+
+Creates a new `Buffer` containing the given JavaScript string `str`. If
+provided, the `encoding` parameter identifies the character encoding.
+If not provided, `encoding` defaults to `'utf8'`.
+
+```js
+const buf1 = Buffer.from('this is a tést');
+console.log(buf1.toString());
+  // prints: this is a tést
+console.log(buf1.toString('ascii'));
+  // prints: this is a tC)st
+
+const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex');
+console.log(buf2.toString());
+  // prints: this is a tést
+```
+
+A `TypeError` will be thrown if `str` is not a string.
+
+### Class Method: Buffer.alloc(size[, fill[, encoding]])
+
+* `size` {Number}
+* `fill` {Value} Default: `undefined`
+* `encoding` {String} Default: `utf8`
+
+Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
+`Buffer` will be *zero-filled*.
+ +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. + +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. 
Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. + +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) 
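+
+As an illustration only (the `toHexSafe` helper below is ours, not part of this package), a type check plus `Buffer.from` closes the hole in the earlier `toHex` example:
+
+```js
+var Buffer = require('safe-buffer').Buffer
+
+// Reject numbers before they can ever reach the Buffer constructor.
+function toHexSafe (str) {
+  if (typeof str !== 'string') throw new TypeError('Expected a string')
+  return Buffer.from(str, 'utf8').toString('hex')
+}
+```
+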
+ +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. 
Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. 
+ +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/safe-buffer/index.d.ts b/node_modules/safe-buffer/index.d.ts new file mode 100644 index 00000000..e9fed809 --- /dev/null +++ b/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + 
readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. 
+ * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. 
+ * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/safe-buffer/index.js b/node_modules/safe-buffer/index.js new file mode 100644 index 00000000..22438dab --- /dev/null +++ b/node_modules/safe-buffer/index.js @@ -0,0 +1,62 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/safe-buffer/package.json b/node_modules/safe-buffer/package.json new file mode 100644 index 00000000..623fbc3f --- /dev/null +++ b/node_modules/safe-buffer/package.json @@ -0,0 +1,37 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.1.2", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/safe-regex/.travis.yml b/node_modules/safe-regex/.travis.yml new file mode 100644 index 00000000..cc4dba29 --- /dev/null +++ b/node_modules/safe-regex/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/node_modules/safe-regex/LICENSE b/node_modules/safe-regex/LICENSE new file mode 100644 index 00000000..ee27ba4b --- /dev/null +++ b/node_modules/safe-regex/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without 
limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/safe-regex/example/safe.js b/node_modules/safe-regex/example/safe.js new file mode 100644 index 00000000..f486f592 --- /dev/null +++ b/node_modules/safe-regex/example/safe.js @@ -0,0 +1,3 @@ +var safe = require('../'); +var regex = process.argv.slice(2).join(' '); +console.log(safe(regex)); diff --git a/node_modules/safe-regex/index.js b/node_modules/safe-regex/index.js new file mode 100644 index 00000000..488f5014 --- /dev/null +++ b/node_modules/safe-regex/index.js @@ -0,0 +1,43 @@ +var parse = require('ret'); +var types = parse.types; + +module.exports = function (re, opts) { + if (!opts) opts = {}; + var replimit = opts.limit === undefined ? 25 : opts.limit; + + if (isRegExp(re)) re = re.source; + else if (typeof re !== 'string') re = String(re); + + try { re = parse(re) } + catch (err) { return false } + + var reps = 0; + return (function walk (node, starHeight) { + if (node.type === types.REPETITION) { + starHeight ++; + reps ++; + if (starHeight > 1) return false; + if (reps > replimit) return false; + } + + if (node.options) { + for (var i = 0, len = node.options.length; i < len; i++) { + var ok = walk({ stack: node.options[i] }, starHeight); + if (!ok) return false; + } + } + var stack = node.stack || (node.value && node.value.stack); + if (!stack) return true; + + for (var i = 0; i < stack.length; i++) { + var ok = walk(stack[i], starHeight); + if (!ok) return false; + } + + return true; + })(re, 0); +}; + +function isRegExp (x) { + return {}.toString.call(x) === '[object RegExp]'; +} diff --git a/node_modules/safe-regex/package.json b/node_modules/safe-regex/package.json new file mode 100644 index 00000000..0dd631b5 --- /dev/null +++ b/node_modules/safe-regex/package.json @@ -0,0 +1,43 @@ +{ + "name": "safe-regex", + "version": "1.1.0", + "description": "detect possibly catastrophic, exponential-time regular expressions", + "main": "index.js", + "dependencies": { + "ret": "~0.1.10" + }, + "devDependencies": { + "tape": "^3.5.0" + }, + "scripts": { + "test": "tape test/*.js" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8", "ie/9", "ie/10", + "firefox/latest", + "chrome/latest", + "opera/latest", + "safari/latest" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/substack/safe-regex.git" + }, + "homepage": "https://github.com/substack/safe-regex", + "keywords": [ + "catastrophic", + "exponential", + "regex", + "safe", + "sandbox" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "license": "MIT" +} diff --git a/node_modules/safe-regex/readme.markdown b/node_modules/safe-regex/readme.markdown new file mode 100644 index 00000000..83673ac3 
--- /dev/null +++ b/node_modules/safe-regex/readme.markdown @@ -0,0 +1,65 @@ +# safe-regex + +detect potentially +[catastrophic](http://regular-expressions.mobi/catastrophic.html) +[exponential-time](http://perlgeek.de/blog-en/perl-tips/in-search-of-an-exponetial-regexp.html) +regular expressions by limiting the +[star height](https://en.wikipedia.org/wiki/Star_height) to 1 + +WARNING: This module merely *seems* to work given all the catastrophic regular +expressions I could find scouring the internet, but I don't have enough of a +background in automata to be absolutely sure that this module will catch all +exponential-time cases. + +[![browser support](https://ci.testling.com/substack/safe-regex.png)](https://ci.testling.com/substack/safe-regex) + +[![build status](https://secure.travis-ci.org/substack/safe-regex.png)](http://travis-ci.org/substack/safe-regex) + +# example + +``` js +var safe = require('safe-regex'); +var regex = process.argv.slice(2).join(' '); +console.log(safe(regex)); +``` + +``` +$ node safe.js '(x+x+)+y' +false +$ node safe.js '(beep|boop)*' +true +$ node safe.js '(a+){10}' +false +$ node safe.js '\blocation\s*:[^:\n]+\b(Oakland|San Francisco)\b' +true +``` + +# methods + +``` js +var safe = require('safe-regex') +``` + +## var ok = safe(re, opts={}) + +Return a boolean `ok` whether or not the regex `re` is safe and not possibly +catastrophic. + +`re` can be a `RegExp` object or just a string. + +If the `re` is a string and is an invalid regex, returns `false`. + +* `opts.limit` - maximum number of allowed repetitions in the entire regex. +Default: `25`. + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install safe-regex +``` + +# license + +MIT diff --git a/node_modules/safe-regex/test/regex.js b/node_modules/safe-regex/test/regex.js new file mode 100644 index 00000000..0bda8504 --- /dev/null +++ b/node_modules/safe-regex/test/regex.js @@ -0,0 +1,50 @@ +var safe = require('../'); +var test = require('tape'); + +var good = [ + /\bOakland\b/, + /\b(Oakland|San Francisco)\b/i, + /^\d+1337\d+$/i, + /^\d+(1337|404)\d+$/i, + /^\d+(1337|404)*\d+$/i, + RegExp(Array(26).join('a?') + Array(26).join('a')), +]; + +test('safe regex', function (t) { + t.plan(good.length); + good.forEach(function (re) { + t.equal(safe(re), true); + }); +}); + + +var bad = [ + /^(a?){25}(a){25}$/, + RegExp(Array(27).join('a?') + Array(27).join('a')), + /(x+x+)+y/, + /foo|(x+x+)+y/, + /(a+){10}y/, + /(a+){2}y/, + /(.*){1,32000}[bc]/ +]; + +test('unsafe regex', function (t) { + t.plan(bad.length); + bad.forEach(function (re) { + t.equal(safe(re), false); + }); +}); + +var invalid = [ + '*Oakland*', + 'hey(yoo))', + 'abcde(?>hellow)', + '[abc' +]; + +test('invalid regex', function (t) { + t.plan(invalid.length); + invalid.forEach(function (re) { + t.equal(safe(re), false); + }); +}); diff --git a/node_modules/safer-buffer/LICENSE b/node_modules/safer-buffer/LICENSE new file mode 100644 index 00000000..4fe9e6f1 --- /dev/null +++ b/node_modules/safer-buffer/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Nikita Skovoroda + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright 
notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/safer-buffer/Porting-Buffer.md b/node_modules/safer-buffer/Porting-Buffer.md new file mode 100644 index 00000000..68d86bab --- /dev/null +++ b/node_modules/safer-buffer/Porting-Buffer.md @@ -0,0 +1,268 @@ +# Porting to the Buffer.from/Buffer.alloc API + + +## Overview + +- [Variant 1: Drop support for Node.js ≤ 4.4.x and 5.0.0 — 5.9.x.](#variant-1) (*recommended*) +- [Variant 2: Use a polyfill](#variant-2) +- [Variant 3: manual detection, with safeguards](#variant-3) + +### Finding problematic bits of code using grep + +Just run `grep -nrE '[^a-zA-Z](Slow)?Buffer\s*\(' --exclude-dir node_modules`. + +It will find all the potentially unsafe places in your own code (with some considerably unlikely +exceptions). + +### Finding problematic bits of code using Node.js 8 + +If you’re using Node.js ≥ 8.0.0 (which is recommended), Node.js exposes multiple options that help with finding the relevant pieces of code: + +- `--trace-warnings` will make Node.js show a stack trace for this warning and other warnings that are printed by Node.js. +- `--trace-deprecation` does the same thing, but only for deprecation warnings. +- `--pending-deprecation` will show more types of deprecation warnings. In particular, it will show the `Buffer()` deprecation warning, even on Node.js 8. + +You can set these flags using an environment variable: + +```console +$ export NODE_OPTIONS='--trace-warnings --pending-deprecation' +$ cat example.js +'use strict'; +const foo = new Buffer('foo'); +$ node example.js +(node:7147) [DEP0005] DeprecationWarning: The Buffer() and new Buffer() constructors are not recommended for use due to security and usability concerns. Please use the new Buffer.alloc(), Buffer.allocUnsafe(), or Buffer.from() construction methods instead. + at showFlaggedDeprecation (buffer.js:127:13) + at new Buffer (buffer.js:148:3) + at Object. (/path/to/example.js:2:13) + [... more stack trace lines ...] +``` + +### Finding problematic bits of code using linters + +Eslint rules [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor) +or +[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md) +also find calls to deprecated `Buffer()` API. Those rules are included in some pre-sets. + +There is a drawback, though, that it doesn't always +[work correctly](https://github.com/chalker/safer-buffer#why-not-safe-buffer) when `Buffer` is +overriden e.g. with a polyfill, so recommended is a combination of this and some other method +described above. + + +## Variant 1: Drop support for Node.js ≤ 4.4.x and 5.0.0 — 5.9.x. + +This is the recommended solution nowadays that would imply only minimal overhead. + +The Node.js 5.x release line has been unsupported since July 2016, and the Node.js 4.x release line reaches its End of Life in April 2018 (→ [Schedule](https://github.com/nodejs/Release#release-schedule)). 
This means that these versions of Node.js will *not* receive any updates, even in case of security issues, so using these release lines should be avoided, if at all possible. + +What you would do in this case is to convert all `new Buffer()` or `Buffer()` calls to use `Buffer.alloc()` or `Buffer.from()`, in the following way: + +- For `new Buffer(number)`, replace it with `Buffer.alloc(number)`. +- For `new Buffer(string)` (or `new Buffer(string, encoding)`), replace it with `Buffer.from(string)` (or `Buffer.from(string, encoding)`). +- For all other combinations of arguments (these are much rarer), also replace `new Buffer(...arguments)` with `Buffer.from(...arguments)`. + +Note that `Buffer.alloc()` is also _faster_ on the current Node.js versions than +`new Buffer(size).fill(0)`, which is what you would otherwise need to ensure zero-filling. + +Enabling eslint rule [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor) +or +[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md) +is recommended to avoid accidential unsafe Buffer API usage. + +There is also a [JSCodeshift codemod](https://github.com/joyeecheung/node-dep-codemod#dep005) +for automatically migrating Buffer constructors to `Buffer.alloc()` or `Buffer.from()`. +Note that it currently only works with cases where the arguments are literals or where the +constructor is invoked with two arguments. + +_If you currently support those older Node.js versions and dropping them would be a semver-major change +for you, or if you support older branches of your packages, consider using [Variant 2](#variant-2) +or [Variant 3](#variant-3) on older branches, so people using those older branches will also receive +the fix. That way, you will eradicate potential issues caused by unguarded Buffer API usage and +your users will not observe a runtime deprecation warning when running your code on Node.js 10._ + + +## Variant 2: Use a polyfill + +Utilize [safer-buffer](https://www.npmjs.com/package/safer-buffer) as a polyfill to support older +Node.js versions. + +You would take exacly the same steps as in [Variant 1](#variant-1), but with a polyfill +`const Buffer = require('safer-buffer').Buffer` in all files where you use the new `Buffer` api. + +Make sure that you do not use old `new Buffer` API — in any files where the line above is added, +using old `new Buffer()` API will _throw_. It will be easy to notice that in CI, though. + +Alternatively, you could use [buffer-from](https://www.npmjs.com/package/buffer-from) and/or +[buffer-alloc](https://www.npmjs.com/package/buffer-alloc) [ponyfills](https://ponyfill.com/) — +those are great, the only downsides being 4 deps in the tree and slightly more code changes to +migrate off them (as you would be using e.g. `Buffer.from` under a different name). If you need only +`Buffer.from` polyfilled — `buffer-from` alone which comes with no extra dependencies. + +_Alternatively, you could use [safe-buffer](https://www.npmjs.com/package/safe-buffer) — it also +provides a polyfill, but takes a different approach which has +[it's drawbacks](https://github.com/chalker/safer-buffer#why-not-safe-buffer). 
It will allow you +to also use the older `new Buffer()` API in your code, though — but that's arguably a benefit, as +it is problematic, can cause issues in your code, and will start emitting runtime deprecation +warnings starting with Node.js 10._ + +Note that in either case, it is important that you also remove all calls to the old Buffer +API manually — just throwing in `safe-buffer` doesn't fix the problem by itself, it just provides +a polyfill for the new API. I have seen people doing that mistake. + +Enabling eslint rule [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor) +or +[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md) +is recommended. + +_Don't forget to drop the polyfill usage once you drop support for Node.js < 4.5.0._ + + +## Variant 3 — manual detection, with safeguards + +This is useful if you create Buffer instances in only a few places (e.g. one), or you have your own +wrapper around them. + +### Buffer(0) + +This special case for creating empty buffers can be safely replaced with `Buffer.concat([])`, which +returns the same result all the way down to Node.js 0.8.x. + +### Buffer(notNumber) + +Before: + +```js +var buf = new Buffer(notNumber, encoding); +``` + +After: + +```js +var buf; +if (Buffer.from && Buffer.from !== Uint8Array.from) { + buf = Buffer.from(notNumber, encoding); +} else { + if (typeof notNumber === 'number') + throw new Error('The "size" argument must be of type number.'); + buf = new Buffer(notNumber, encoding); +} +``` + +`encoding` is optional. + +Note that the `typeof notNumber` before `new Buffer` is required (for cases when `notNumber` argument is not +hard-coded) and _is not caused by the deprecation of Buffer constructor_ — it's exactly _why_ the +Buffer constructor is deprecated. Ecosystem packages lacking this type-check caused numereous +security issues — situations when unsanitized user input could end up in the `Buffer(arg)` create +problems ranging from DoS to leaking sensitive information to the attacker from the process memory. + +When `notNumber` argument is hardcoded (e.g. literal `"abc"` or `[0,1,2]`), the `typeof` check can +be omitted. + +Also note that using TypeScript does not fix this problem for you — when libs written in +`TypeScript` are used from JS, or when user input ends up there — it behaves exactly as pure JS, as +all type checks are translation-time only and are not present in the actual JS code which TS +compiles to. + +### Buffer(number) + +For Node.js 0.10.x (and below) support: + +```js +var buf; +if (Buffer.alloc) { + buf = Buffer.alloc(number); +} else { + buf = new Buffer(number); + buf.fill(0); +} +``` + +Otherwise (Node.js ≥ 0.12.x): + +```js +const buf = Buffer.alloc ? Buffer.alloc(number) : new Buffer(number).fill(0); +``` + +## Regarding Buffer.allocUnsafe + +Be extra cautious when using `Buffer.allocUnsafe`: + * Don't use it if you don't have a good reason to + * e.g. you probably won't ever see a performance difference for small buffers, in fact, those + might be even faster with `Buffer.alloc()`, + * if your code is not in the hot code path — you also probably won't notice a difference, + * keep in mind that zero-filling minimizes the potential risks. 
+ * If you use it, make sure that you never return the buffer in a partially-filled state, + * if you are writing to it sequentially — always truncate it to the actuall written length + +Errors in handling buffers allocated with `Buffer.allocUnsafe` could result in various issues, +ranged from undefined behaviour of your code to sensitive data (user input, passwords, certs) +leaking to the remote attacker. + +_Note that the same applies to `new Buffer` usage without zero-filling, depending on the Node.js +version (and lacking type checks also adds DoS to the list of potential problems)._ + + +## FAQ + + +### What is wrong with the `Buffer` constructor? + +The `Buffer` constructor could be used to create a buffer in many different ways: + +- `new Buffer(42)` creates a `Buffer` of 42 bytes. Before Node.js 8, this buffer contained + *arbitrary memory* for performance reasons, which could include anything ranging from + program source code to passwords and encryption keys. +- `new Buffer('abc')` creates a `Buffer` that contains the UTF-8-encoded version of + the string `'abc'`. A second argument could specify another encoding: For example, + `new Buffer(string, 'base64')` could be used to convert a Base64 string into the original + sequence of bytes that it represents. +- There are several other combinations of arguments. + +This meant that, in code like `var buffer = new Buffer(foo);`, *it is not possible to tell +what exactly the contents of the generated buffer are* without knowing the type of `foo`. + +Sometimes, the value of `foo` comes from an external source. For example, this function +could be exposed as a service on a web server, converting a UTF-8 string into its Base64 form: + +``` +function stringToBase64(req, res) { + // The request body should have the format of `{ string: 'foobar' }` + const rawBytes = new Buffer(req.body.string) + const encoded = rawBytes.toString('base64') + res.end({ encoded: encoded }) +} +``` + +Note that this code does *not* validate the type of `req.body.string`: + +- `req.body.string` is expected to be a string. If this is the case, all goes well. +- `req.body.string` is controlled by the client that sends the request. +- If `req.body.string` is the *number* `50`, the `rawBytes` would be 50 bytes: + - Before Node.js 8, the content would be uninitialized + - After Node.js 8, the content would be `50` bytes with the value `0` + +Because of the missing type check, an attacker could intentionally send a number +as part of the request. Using this, they can either: + +- Read uninitialized memory. This **will** leak passwords, encryption keys and other + kinds of sensitive information. (Information leak) +- Force the program to allocate a large amount of memory. For example, when specifying + `500000000` as the input value, each request will allocate 500MB of memory. + This can be used to either exhaust the memory available of a program completely + and make it crash, or slow it down significantly. (Denial of Service) + +Both of these scenarios are considered serious security issues in a real-world +web server context. + +when using `Buffer.from(req.body.string)` instead, passing a number will always +throw an exception instead, giving a controlled behaviour that can always be +handled by the program. + + +### The `Buffer()` constructor has been deprecated for a while. Is this really an issue? + +Surveys of code in the `npm` ecosystem have shown that the `Buffer()` constructor is still +widely used. 
This includes new code, and overall usage of such code has actually been +*increasing*. diff --git a/node_modules/safer-buffer/Readme.md b/node_modules/safer-buffer/Readme.md new file mode 100644 index 00000000..14b08229 --- /dev/null +++ b/node_modules/safer-buffer/Readme.md @@ -0,0 +1,156 @@ +# safer-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![javascript style guide][standard-image]][standard-url] [![Security Responsible Disclosure][secuirty-image]][secuirty-url] + +[travis-image]: https://travis-ci.org/ChALkeR/safer-buffer.svg?branch=master +[travis-url]: https://travis-ci.org/ChALkeR/safer-buffer +[npm-image]: https://img.shields.io/npm/v/safer-buffer.svg +[npm-url]: https://npmjs.org/package/safer-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com +[secuirty-image]: https://img.shields.io/badge/Security-Responsible%20Disclosure-green.svg +[secuirty-url]: https://github.com/nodejs/security-wg/blob/master/processes/responsible_disclosure_template.md + +Modern Buffer API polyfill without footguns, working on Node.js from 0.8 to current. + +## How to use? + +First, port all `Buffer()` and `new Buffer()` calls to `Buffer.alloc()` and `Buffer.from()` API. + +Then, to achieve compatibility with outdated Node.js versions (`<4.5.0` and 5.x `<5.9.0`), use +`const Buffer = require('safer-buffer').Buffer` in all files where you make calls to the new +Buffer API. _Use `var` instead of `const` if you need that for your Node.js version range support._ + +Also, see the +[porting Buffer](https://github.com/ChALkeR/safer-buffer/blob/master/Porting-Buffer.md) guide. + +## Do I need it? + +Hopefully, not — dropping support for outdated Node.js versions should be fine nowdays, and that +is the recommended path forward. You _do_ need to port to the `Buffer.alloc()` and `Buffer.from()` +though. + +See the [porting guide](https://github.com/ChALkeR/safer-buffer/blob/master/Porting-Buffer.md) +for a better description. + +## Why not [safe-buffer](https://npmjs.com/safe-buffer)? + +_In short: while `safe-buffer` serves as a polyfill for the new API, it allows old API usage and +itself contains footguns._ + +`safe-buffer` could be used safely to get the new API while still keeping support for older +Node.js versions (like this module), but while analyzing ecosystem usage of the old Buffer API +I found out that `safe-buffer` is itself causing problems in some cases. + +For example, consider the following snippet: + +```console +$ cat example.unsafe.js +console.log(Buffer(20)) +$ ./node-v6.13.0-linux-x64/bin/node example.unsafe.js + +$ standard example.unsafe.js +standard: Use JavaScript Standard Style (https://standardjs.com) + /home/chalker/repo/safer-buffer/example.unsafe.js:2:13: 'Buffer()' was deprecated since v6. Use 'Buffer.alloc()' or 'Buffer.from()' (use 'https://www.npmjs.com/package/safe-buffer' for '<4.5.0') instead. +``` + +This is allocates and writes to console an uninitialized chunk of memory. +[standard](https://www.npmjs.com/package/standard) linter (among others) catch that and warn people +to avoid using unsafe API. + +Let's now throw in `safe-buffer`! + +```console +$ cat example.safe-buffer.js +const Buffer = require('safe-buffer').Buffer +console.log(Buffer(20)) +$ standard example.safe-buffer.js +$ ./node-v6.13.0-linux-x64/bin/node example.safe-buffer.js + +``` + +See the problem? 
Adding in `safe-buffer` _magically removes the lint warning_, but the behavior +remains identiсal to what we had before, and when launched on Node.js 6.x LTS — this dumps out +chunks of uninitialized memory. +_And this code will still emit runtime warnings on Node.js 10.x and above._ + +That was done by design. I first considered changing `safe-buffer`, prohibiting old API usage or +emitting warnings on it, but that significantly diverges from `safe-buffer` design. After some +discussion, it was decided to move my approach into a separate package, and _this is that separate +package_. + +This footgun is not imaginary — I observed top-downloaded packages doing that kind of thing, +«fixing» the lint warning by blindly including `safe-buffer` without any actual changes. + +Also in some cases, even if the API _was_ migrated to use of safe Buffer API — a random pull request +can bring unsafe Buffer API usage back to the codebase by adding new calls — and that could go +unnoticed even if you have a linter prohibiting that (becase of the reason stated above), and even +pass CI. _I also observed that being done in popular packages._ + +Some examples: + * [webdriverio](https://github.com/webdriverio/webdriverio/commit/05cbd3167c12e4930f09ef7cf93b127ba4effae4#diff-124380949022817b90b622871837d56cR31) + (a module with 548 759 downloads/month), + * [websocket-stream](https://github.com/maxogden/websocket-stream/commit/c9312bd24d08271687d76da0fe3c83493871cf61) + (218 288 d/m, fix in [maxogden/websocket-stream#142](https://github.com/maxogden/websocket-stream/pull/142)), + * [node-serialport](https://github.com/node-serialport/node-serialport/commit/e8d9d2b16c664224920ce1c895199b1ce2def48c) + (113 138 d/m, fix in [node-serialport/node-serialport#1510](https://github.com/node-serialport/node-serialport/pull/1510)), + * [karma](https://github.com/karma-runner/karma/commit/3d94b8cf18c695104ca195334dc75ff054c74eec) + (3 973 193 d/m, fix in [karma-runner/karma#2947](https://github.com/karma-runner/karma/pull/2947)), + * [spdy-transport](https://github.com/spdy-http2/spdy-transport/commit/5375ac33f4a62a4f65bcfc2827447d42a5dbe8b1) + (5 970 727 d/m, fix in [spdy-http2/spdy-transport#53](https://github.com/spdy-http2/spdy-transport/pull/53)). + * And there are a lot more over the ecosystem. + +I filed a PR at +[mysticatea/eslint-plugin-node#110](https://github.com/mysticatea/eslint-plugin-node/pull/110) to +partially fix that (for cases when that lint rule is used), but it is a semver-major change for +linter rules and presets, so it would take significant time for that to reach actual setups. +_It also hasn't been released yet (2018-03-20)._ + +Also, `safer-buffer` discourages the usage of `.allocUnsafe()`, which is often done by a mistake. +It still supports it with an explicit concern barier, by placing it under +`require('safer-buffer/dangereous')`. + +## But isn't throwing bad? + +Not really. It's an error that could be noticed and fixed early, instead of causing havoc later like +unguarded `new Buffer()` calls that end up receiving user input can do. + +This package affects only the files where `var Buffer = require('safer-buffer').Buffer` was done, so +it is really simple to keep track of things and make sure that you don't mix old API usage with that. +Also, CI should hint anything that you might have missed. + +New commits, if tested, won't land new usage of unsafe Buffer API this way. 
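For illustration, a minimal sketch of what an opted-in file looks like in practice (the file and variable names here are only examples):

```js
// example.js — a file that has opted in to the polyfill
var Buffer = require('safer-buffer').Buffer

var a = Buffer.from('abc') // fine: new API, works down to Node.js 0.8
var b = Buffer.alloc(16)   // fine: returns a zero-filled 16-byte Buffer

var c = new Buffer(16)     // throws: the exported Buffer is a plain object,
                           // not a constructor, so old-API calls fail fast in tests/CI
```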
+_Node.js 10.x also deals with that by printing a runtime depecation warning._ + +### Would it affect third-party modules? + +No, unless you explicitly do an awful thing like monkey-patching or overriding the built-in `Buffer`. +Don't do that. + +### But I don't want throwing… + +That is also fine! + +Also, it could be better in some cases when you don't comprehensive enough test coverage. + +In that case — just don't override `Buffer` and use +`var SaferBuffer = require('safer-buffer').Buffer` instead. + +That way, everything using `Buffer` natively would still work, but there would be two drawbacks: + +* `Buffer.from`/`Buffer.alloc` won't be polyfilled — use `SaferBuffer.from` and + `SaferBuffer.alloc` instead. +* You are still open to accidentally using the insecure deprecated API — use a linter to catch that. + +Note that using a linter to catch accidential `Buffer` constructor usage in this case is strongly +recommended. `Buffer` is not overriden in this usecase, so linters won't get confused. + +## «Without footguns»? + +Well, it is still possible to do _some_ things with `Buffer` API, e.g. accessing `.buffer` property +on older versions and duping things from there. You shouldn't do that in your code, probabably. + +The intention is to remove the most significant footguns that affect lots of packages in the +ecosystem, and to do it in the proper way. + +Also, this package doesn't protect against security issues affecting some Node.js versions, so for +usage in your own production code, it is still recommended to update to a Node.js version +[supported by upstream](https://github.com/nodejs/release#release-schedule). diff --git a/node_modules/safer-buffer/dangerous.js b/node_modules/safer-buffer/dangerous.js new file mode 100644 index 00000000..ca41fdc5 --- /dev/null +++ b/node_modules/safer-buffer/dangerous.js @@ -0,0 +1,58 @@ +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +var buffer = require('buffer') +var Buffer = buffer.Buffer +var safer = require('./safer.js') +var Safer = safer.Buffer + +var dangerous = {} + +var key + +for (key in safer) { + if (!safer.hasOwnProperty(key)) continue + dangerous[key] = safer[key] +} + +var Dangereous = dangerous.Buffer = {} + +// Copy Safer API +for (key in Safer) { + if (!Safer.hasOwnProperty(key)) continue + Dangereous[key] = Safer[key] +} + +// Copy those missing unsafe methods, if they are present +for (key in Buffer) { + if (!Buffer.hasOwnProperty(key)) continue + if (Dangereous.hasOwnProperty(key)) continue + Dangereous[key] = Buffer[key] +} + +if (!Dangereous.allocUnsafe) { + Dangereous.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + return Buffer(size) + } +} + +if (!Dangereous.allocUnsafeSlow) { + Dangereous.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. 
Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + return buffer.SlowBuffer(size) + } +} + +module.exports = dangerous diff --git a/node_modules/safer-buffer/package.json b/node_modules/safer-buffer/package.json new file mode 100644 index 00000000..d452b04a --- /dev/null +++ b/node_modules/safer-buffer/package.json @@ -0,0 +1,34 @@ +{ + "name": "safer-buffer", + "version": "2.1.2", + "description": "Modern Buffer API polyfill without footguns", + "main": "safer.js", + "scripts": { + "browserify-test": "browserify --external tape tests.js > browserify-tests.js && tape browserify-tests.js", + "test": "standard && tape tests.js" + }, + "author": { + "name": "Nikita Skovoroda", + "email": "chalkerx@gmail.com", + "url": "https://github.com/ChALkeR" + }, + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/ChALkeR/safer-buffer.git" + }, + "bugs": { + "url": "https://github.com/ChALkeR/safer-buffer/issues" + }, + "devDependencies": { + "standard": "^11.0.1", + "tape": "^4.9.0" + }, + "files": [ + "Porting-Buffer.md", + "Readme.md", + "tests.js", + "dangerous.js", + "safer.js" + ] +} diff --git a/node_modules/safer-buffer/safer.js b/node_modules/safer-buffer/safer.js new file mode 100644 index 00000000..37c7e1aa --- /dev/null +++ b/node_modules/safer-buffer/safer.js @@ -0,0 +1,77 @@ +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +var buffer = require('buffer') +var Buffer = buffer.Buffer + +var safer = {} + +var key + +for (key in buffer) { + if (!buffer.hasOwnProperty(key)) continue + if (key === 'SlowBuffer' || key === 'Buffer') continue + safer[key] = buffer[key] +} + +var Safer = safer.Buffer = {} +for (key in Buffer) { + if (!Buffer.hasOwnProperty(key)) continue + if (key === 'allocUnsafe' || key === 'allocUnsafeSlow') continue + Safer[key] = Buffer[key] +} + +safer.Buffer.prototype = Buffer.prototype + +if (!Safer.from || Safer.from === Uint8Array.from) { + Safer.from = function (value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('The "value" argument must not be of type number. Received type ' + typeof value) + } + if (value && typeof value.length === 'undefined') { + throw new TypeError('The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type ' + typeof value) + } + return Buffer(value, encodingOrOffset, length) + } +} + +if (!Safer.alloc) { + Safer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. 
Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + var buf = Buffer(size) + if (!fill || fill.length === 0) { + buf.fill(0) + } else if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + return buf + } +} + +if (!safer.kStringMaxLength) { + try { + safer.kStringMaxLength = process.binding('buffer').kStringMaxLength + } catch (e) { + // we can't determine kStringMaxLength in environments where process.binding + // is unsupported, so let's not set it + } +} + +if (!safer.constants) { + safer.constants = { + MAX_LENGTH: safer.kMaxLength + } + if (safer.kStringMaxLength) { + safer.constants.MAX_STRING_LENGTH = safer.kStringMaxLength + } +} + +module.exports = safer diff --git a/node_modules/safer-buffer/tests.js b/node_modules/safer-buffer/tests.js new file mode 100644 index 00000000..7ed2777c --- /dev/null +++ b/node_modules/safer-buffer/tests.js @@ -0,0 +1,406 @@ +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +var test = require('tape') + +var buffer = require('buffer') + +var index = require('./') +var safer = require('./safer') +var dangerous = require('./dangerous') + +/* Inheritance tests */ + +test('Default is Safer', function (t) { + t.equal(index, safer) + t.notEqual(safer, dangerous) + t.notEqual(index, dangerous) + t.end() +}) + +test('Is not a function', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(typeof impl, 'object') + t.equal(typeof impl.Buffer, 'object') + }); + [buffer].forEach(function (impl) { + t.equal(typeof impl, 'object') + t.equal(typeof impl.Buffer, 'function') + }) + t.end() +}) + +test('Constructor throws', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.throws(function () { impl.Buffer() }) + t.throws(function () { impl.Buffer(0) }) + t.throws(function () { impl.Buffer('a') }) + t.throws(function () { impl.Buffer('a', 'utf-8') }) + t.throws(function () { return new impl.Buffer() }) + t.throws(function () { return new impl.Buffer(0) }) + t.throws(function () { return new impl.Buffer('a') }) + t.throws(function () { return new impl.Buffer('a', 'utf-8') }) + }) + t.end() +}) + +test('Safe methods exist', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(typeof impl.Buffer.alloc, 'function', 'alloc') + t.equal(typeof impl.Buffer.from, 'function', 'from') + }) + t.end() +}) + +test('Unsafe methods exist only in Dangerous', function (t) { + [index, safer].forEach(function (impl) { + t.equal(typeof impl.Buffer.allocUnsafe, 'undefined') + t.equal(typeof impl.Buffer.allocUnsafeSlow, 'undefined') + }); + [dangerous].forEach(function (impl) { + t.equal(typeof impl.Buffer.allocUnsafe, 'function') + t.equal(typeof impl.Buffer.allocUnsafeSlow, 'function') + }) + t.end() +}) + +test('Generic methods/properties are defined and equal', function (t) { + ['poolSize', 'isBuffer', 'concat', 'byteLength'].forEach(function (method) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], buffer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Built-in buffer static methods/properties are inherited', function (t) { + Object.keys(buffer).forEach(function (method) { + if (method === 'SlowBuffer' || method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], buffer[method], method) + 
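      // note: the typeof check below guards against the equality above passing
      // trivially as undefined === undefined when a property is missing on both sides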
t.notEqual(typeof impl[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Built-in Buffer static methods/properties are inherited', function (t) { + Object.keys(buffer.Buffer).forEach(function (method) { + if (method === 'allocUnsafe' || method === 'allocUnsafeSlow') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], buffer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('.prototype property of Buffer is inherited', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.prototype, buffer.Buffer.prototype, 'prototype') + t.notEqual(typeof impl.Buffer.prototype, 'undefined', 'prototype') + }) + t.end() +}) + +test('All Safer methods are present in Dangerous', function (t) { + Object.keys(safer).forEach(function (method) { + if (method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], safer[method], method) + if (method !== 'kStringMaxLength') { + t.notEqual(typeof impl[method], 'undefined', method) + } + }) + }) + Object.keys(safer.Buffer).forEach(function (method) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], safer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Safe methods from Dangerous methods are present in Safer', function (t) { + Object.keys(dangerous).forEach(function (method) { + if (method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], dangerous[method], method) + if (method !== 'kStringMaxLength') { + t.notEqual(typeof impl[method], 'undefined', method) + } + }) + }) + Object.keys(dangerous.Buffer).forEach(function (method) { + if (method === 'allocUnsafe' || method === 'allocUnsafeSlow') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], dangerous.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +/* Behaviour tests */ + +test('Methods return Buffers', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0, 10))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0, 'a'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(10))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(10, 'x'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(9, 'ab'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from(''))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('string'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('string', 'utf-8'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from([0, 42, 3]))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from(new Uint8Array([0, 42, 3])))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from([]))) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.ok(buffer.Buffer.isBuffer(dangerous.Buffer[method](0))) + t.ok(buffer.Buffer.isBuffer(dangerous.Buffer[method](10))) + }) + t.end() +}) + +test('Constructor is buffer.Buffer', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.alloc(0).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(0, 10).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(0, 'a').constructor, buffer.Buffer) + 
t.equal(impl.Buffer.alloc(10).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(10, 'x').constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(9, 'ab').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('string').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('string', 'utf-8').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64').constructor, buffer.Buffer) + t.equal(impl.Buffer.from([0, 42, 3]).constructor, buffer.Buffer) + t.equal(impl.Buffer.from(new Uint8Array([0, 42, 3])).constructor, buffer.Buffer) + t.equal(impl.Buffer.from([]).constructor, buffer.Buffer) + }); + [0, 10, 100].forEach(function (arg) { + t.equal(dangerous.Buffer.allocUnsafe(arg).constructor, buffer.Buffer) + t.equal(dangerous.Buffer.allocUnsafeSlow(arg).constructor, buffer.SlowBuffer(0).constructor) + }) + t.end() +}) + +test('Invalid calls throw', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.throws(function () { impl.Buffer.from(0) }) + t.throws(function () { impl.Buffer.from(10) }) + t.throws(function () { impl.Buffer.from(10, 'utf-8') }) + t.throws(function () { impl.Buffer.from('string', 'invalid encoding') }) + t.throws(function () { impl.Buffer.from(-10) }) + t.throws(function () { impl.Buffer.from(1e90) }) + t.throws(function () { impl.Buffer.from(Infinity) }) + t.throws(function () { impl.Buffer.from(-Infinity) }) + t.throws(function () { impl.Buffer.from(NaN) }) + t.throws(function () { impl.Buffer.from(null) }) + t.throws(function () { impl.Buffer.from(undefined) }) + t.throws(function () { impl.Buffer.from() }) + t.throws(function () { impl.Buffer.from({}) }) + t.throws(function () { impl.Buffer.alloc('') }) + t.throws(function () { impl.Buffer.alloc('string') }) + t.throws(function () { impl.Buffer.alloc('string', 'utf-8') }) + t.throws(function () { impl.Buffer.alloc('b25ldHdvdGhyZWU=', 'base64') }) + t.throws(function () { impl.Buffer.alloc(-10) }) + t.throws(function () { impl.Buffer.alloc(1e90) }) + t.throws(function () { impl.Buffer.alloc(2 * (1 << 30)) }) + t.throws(function () { impl.Buffer.alloc(Infinity) }) + t.throws(function () { impl.Buffer.alloc(-Infinity) }) + t.throws(function () { impl.Buffer.alloc(null) }) + t.throws(function () { impl.Buffer.alloc(undefined) }) + t.throws(function () { impl.Buffer.alloc() }) + t.throws(function () { impl.Buffer.alloc([]) }) + t.throws(function () { impl.Buffer.alloc([0, 42, 3]) }) + t.throws(function () { impl.Buffer.alloc({}) }) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.throws(function () { dangerous.Buffer[method]('') }) + t.throws(function () { dangerous.Buffer[method]('string') }) + t.throws(function () { dangerous.Buffer[method]('string', 'utf-8') }) + t.throws(function () { dangerous.Buffer[method](2 * (1 << 30)) }) + t.throws(function () { dangerous.Buffer[method](Infinity) }) + if (dangerous.Buffer[method] === buffer.Buffer.allocUnsafe) { + t.skip('Skipping, older impl of allocUnsafe coerced negative sizes to 0') + } else { + t.throws(function () { dangerous.Buffer[method](-10) }) + t.throws(function () { dangerous.Buffer[method](-1e90) }) + t.throws(function () { dangerous.Buffer[method](-Infinity) }) + } + t.throws(function () { dangerous.Buffer[method](null) }) + t.throws(function () { dangerous.Buffer[method](undefined) }) + t.throws(function () { dangerous.Buffer[method]() }) + t.throws(function () { dangerous.Buffer[method]([]) }) + t.throws(function () { 
dangerous.Buffer[method]([0, 42, 3]) }) + t.throws(function () { dangerous.Buffer[method]({}) }) + }) + t.end() +}) + +test('Buffers have appropriate lengths', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.alloc(0).length, 0) + t.equal(impl.Buffer.alloc(10).length, 10) + t.equal(impl.Buffer.from('').length, 0) + t.equal(impl.Buffer.from('string').length, 6) + t.equal(impl.Buffer.from('string', 'utf-8').length, 6) + t.equal(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64').length, 11) + t.equal(impl.Buffer.from([0, 42, 3]).length, 3) + t.equal(impl.Buffer.from(new Uint8Array([0, 42, 3])).length, 3) + t.equal(impl.Buffer.from([]).length, 0) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.equal(dangerous.Buffer[method](0).length, 0) + t.equal(dangerous.Buffer[method](10).length, 10) + }) + t.end() +}) + +test('Buffers have appropriate lengths (2)', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true; + [ safer.Buffer.alloc, + dangerous.Buffer.allocUnsafe, + dangerous.Buffer.allocUnsafeSlow + ].forEach(function (method) { + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 1e5) + var buf = method(length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + } + }) + t.ok(ok) + t.end() +}) + +test('.alloc(size) is zero-filled and has correct length', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var buf = index.Buffer.alloc(length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + var j + for (j = 0; j < length; j++) { + if (buf[j] !== 0) ok = false + } + buf.fill(1) + for (j = 0; j < length; j++) { + if (buf[j] !== 1) ok = false + } + } + t.ok(ok) + t.end() +}) + +test('.allocUnsafe / .allocUnsafeSlow are fillable and have correct lengths', function (t) { + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var buf = dangerous.Buffer[method](length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + buf.fill(0, 0, length) + var j + for (j = 0; j < length; j++) { + if (buf[j] !== 0) ok = false + } + buf.fill(1, 0, length) + for (j = 0; j < length; j++) { + if (buf[j] !== 1) ok = false + } + } + t.ok(ok, method) + }) + t.end() +}) + +test('.alloc(size, fill) is `fill`-filled', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var fill = Math.round(Math.random() * 255) + var buf = index.Buffer.alloc(length, fill) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + for (var j = 0; j < length; j++) { + if (buf[j] !== fill) ok = false + } + } + t.ok(ok) + t.end() +}) + +test('.alloc(size, fill) is `fill`-filled', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var fill = Math.round(Math.random() * 255) + var buf = index.Buffer.alloc(length, fill) + if 
(!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + for (var j = 0; j < length; j++) { + if (buf[j] !== fill) ok = false + } + } + t.ok(ok) + t.deepEqual(index.Buffer.alloc(9, 'a'), index.Buffer.alloc(9, 97)) + t.notDeepEqual(index.Buffer.alloc(9, 'a'), index.Buffer.alloc(9, 98)) + + var tmp = new buffer.Buffer(2) + tmp.fill('ok') + if (tmp[1] === tmp[0]) { + // Outdated Node.js + t.deepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('ooooo')) + } else { + t.deepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('okoko')) + } + t.notDeepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('kokok')) + + t.end() +}) + +test('safer.Buffer.from returns results same as Buffer constructor', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.deepEqual(impl.Buffer.from(''), new buffer.Buffer('')) + t.deepEqual(impl.Buffer.from('string'), new buffer.Buffer('string')) + t.deepEqual(impl.Buffer.from('string', 'utf-8'), new buffer.Buffer('string', 'utf-8')) + t.deepEqual(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'), new buffer.Buffer('b25ldHdvdGhyZWU=', 'base64')) + t.deepEqual(impl.Buffer.from([0, 42, 3]), new buffer.Buffer([0, 42, 3])) + t.deepEqual(impl.Buffer.from(new Uint8Array([0, 42, 3])), new buffer.Buffer(new Uint8Array([0, 42, 3]))) + t.deepEqual(impl.Buffer.from([]), new buffer.Buffer([])) + }) + t.end() +}) + +test('safer.Buffer.from returns consistent results', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.deepEqual(impl.Buffer.from(''), impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from([]), impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from(new Uint8Array([])), impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from('string', 'utf-8'), impl.Buffer.from('string')) + t.deepEqual(impl.Buffer.from('string'), impl.Buffer.from([115, 116, 114, 105, 110, 103])) + t.deepEqual(impl.Buffer.from('string'), impl.Buffer.from(impl.Buffer.from('string'))) + t.deepEqual(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'), impl.Buffer.from('onetwothree')) + t.notDeepEqual(impl.Buffer.from('b25ldHdvdGhyZWU='), impl.Buffer.from('onetwothree')) + }) + t.end() +}) diff --git a/node_modules/sax/LICENSE b/node_modules/sax/LICENSE new file mode 100644 index 00000000..ccffa082 --- /dev/null +++ b/node_modules/sax/LICENSE @@ -0,0 +1,41 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +==== + +`String.fromCodePoint` by Mathias Bynens used according to terms of MIT +License, as follows: + + Copyright Mathias Bynens + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/sax/README.md b/node_modules/sax/README.md new file mode 100644 index 00000000..afcd3f3d --- /dev/null +++ b/node_modules/sax/README.md @@ -0,0 +1,225 @@ +# sax js + +A sax-style parser for XML and HTML. + +Designed with [node](http://nodejs.org/) in mind, but should work fine in +the browser or other CommonJS implementations. + +## What This Is + +* A very simple tool to parse through an XML string. +* A stepping stone to a streaming HTML parser. +* A handy way to deal with RSS and other mostly-ok-but-kinda-broken XML + docs. + +## What This Is (probably) Not + +* An HTML Parser - That's a fine goal, but this isn't it. It's just + XML. +* A DOM Builder - You can use it to build an object model out of XML, + but it doesn't do that out of the box. +* XSLT - No DOM = no querying. +* 100% Compliant with (some other SAX implementation) - Most SAX + implementations are in Java and do a lot more than this does. +* An XML Validator - It does a little validation when in strict mode, but + not much. +* A Schema-Aware XSD Thing - Schemas are an exercise in fetishistic + masochism. +* A DTD-aware Thing - Fetching DTDs is a much bigger job. + +## Regarding `Hello, world!').close(); + +// stream usage +// takes the same options as the parser +var saxStream = require("sax").createStream(strict, options) +saxStream.on("error", function (e) { + // unhandled errors will throw, since this is a proper node + // event emitter. + console.error("error!", e) + // clear the error + this._parser.error = null + this._parser.resume() +}) +saxStream.on("opentag", function (node) { + // same object as above +}) +// pipe is supported, and it's readable/writable +// same chunks coming in also go out. +fs.createReadStream("file.xml") + .pipe(saxStream) + .pipe(fs.createWriteStream("file-copy.xml")) +``` + + +## Arguments + +Pass the following arguments to the parser function. All are optional. + +`strict` - Boolean. Whether or not to be a jerk. Default: `false`. + +`opt` - Object bag of settings regarding string formatting. All default to `false`. + +Settings supported: + +* `trim` - Boolean. Whether or not to trim text and comment nodes. +* `normalize` - Boolean. If true, then turn any whitespace into a single + space. +* `lowercase` - Boolean. 
If true, then lowercase tag names and attribute names + in loose mode, rather than uppercasing them. +* `xmlns` - Boolean. If true, then namespaces are supported. +* `position` - Boolean. If false, then don't track line/col/position. +* `strictEntities` - Boolean. If true, only parse [predefined XML + entities](http://www.w3.org/TR/REC-xml/#sec-predefined-ent) + (`&`, `'`, `>`, `<`, and `"`) + +## Methods + +`write` - Write bytes onto the stream. You don't have to do this all at +once. You can keep writing as much as you want. + +`close` - Close the stream. Once closed, no more data may be written until +it is done processing the buffer, which is signaled by the `end` event. + +`resume` - To gracefully handle errors, assign a listener to the `error` +event. Then, when the error is taken care of, you can call `resume` to +continue parsing. Otherwise, the parser will not continue while in an error +state. + +## Members + +At all times, the parser object will have the following members: + +`line`, `column`, `position` - Indications of the position in the XML +document where the parser currently is looking. + +`startTagPosition` - Indicates the position where the current tag starts. + +`closed` - Boolean indicating whether or not the parser can be written to. +If it's `true`, then wait for the `ready` event to write again. + +`strict` - Boolean indicating whether or not the parser is a jerk. + +`opt` - Any options passed into the constructor. + +`tag` - The current tag being dealt with. + +And a bunch of other stuff that you probably shouldn't touch. + +## Events + +All events emit with a single argument. To listen to an event, assign a +function to `on`. Functions get executed in the this-context of +the parser object. The list of supported events are also in the exported +`EVENTS` array. + +When using the stream interface, assign handlers using the EventEmitter +`on` function in the normal fashion. + +`error` - Indication that something bad happened. The error will be hanging +out on `parser.error`, and must be deleted before parsing can continue. By +listening to this event, you can keep an eye on that kind of stuff. Note: +this happens *much* more in strict mode. Argument: instance of `Error`. + +`text` - Text node. Argument: string of text. + +`doctype` - The ``. Argument: +object with `name` and `body` members. Attributes are not parsed, as +processing instructions have implementation dependent semantics. + +`sgmldeclaration` - Random SGML declarations. Stuff like `` +would trigger this kind of event. This is a weird thing to support, so it +might go away at some point. SAX isn't intended to be used to parse SGML, +after all. + +`opentagstart` - Emitted immediately when the tag name is available, +but before any attributes are encountered. Argument: object with a +`name` field and an empty `attributes` set. Note that this is the +same object that will later be emitted in the `opentag` event. + +`opentag` - An opening tag. Argument: object with `name` and `attributes`. +In non-strict mode, tag names are uppercased, unless the `lowercase` +option is set. If the `xmlns` option is set, then it will contain +namespace binding information on the `ns` member, and will have a +`local`, `prefix`, and `uri` member. + +`closetag` - A closing tag. In loose mode, tags are auto-closed if their +parent closes. In strict mode, well-formedness is enforced. Note that +self-closing tags will have `closeTag` emitted immediately after `openTag`. +Argument: tag name. + +`attribute` - An attribute node. 
Argument: object with `name` and `value`. +In non-strict mode, attribute names are uppercased, unless the `lowercase` +option is set. If the `xmlns` option is set, it will also contains namespace +information. + +`comment` - A comment node. Argument: the string of the comment. + +`opencdata` - The opening tag of a ``) of a `` tags trigger a `"script"` +event, and their contents are not checked for special xml characters. +If you pass `noscript: true`, then this behavior is suppressed. + +## Reporting Problems + +It's best to write a failing test if you find an issue. I will always +accept pull requests with failing tests if they demonstrate intended +behavior, but it is very hard to figure out what issue you're describing +without a test. Writing a test is also the best way for you yourself +to figure out if you really understand the issue you think you have with +sax-js. diff --git a/node_modules/sax/lib/sax.js b/node_modules/sax/lib/sax.js new file mode 100644 index 00000000..795d607e --- /dev/null +++ b/node_modules/sax/lib/sax.js @@ -0,0 +1,1565 @@ +;(function (sax) { // wrapper for non-node envs + sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } + sax.SAXParser = SAXParser + sax.SAXStream = SAXStream + sax.createStream = createStream + + // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. + // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. + // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. + sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. 
+ if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. + var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = require('stream').Stream + } catch (ex) { + Stream = function () {} + } + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. 
+ me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = require('string_decoder').StringDecoder + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. + var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time + // as such, it cannot ever support astral-plane characters (10000-EFFFF) + // without a significant breaking change to either this parser, or the + // JavaScript language. Implementation of an emoji-capable xml parser + // is left as an exercise for the reader. + var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + + var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + function isWhitespace (c) { + return c === ' ' || c === '\n' || c === '\r' || c === '\t' + } + + function isQuote (c) { + return c === '"' || c === '\'' + } + + function isAttribEnd (c) { + return c === '>' || isWhitespace(c) + } + + function isMatch (regex, c) { + return regex.test(c) + } + + function notMatch (regex, c) { + return !isMatch(regex, c) + } + + var S = 0 + sax.STATE = { + BEGIN: S++, // leading byte order mark or whitespace + BEGIN_WHITESPACE: S++, // leading whitespace + TEXT: S++, // general stuff + TEXT_ENTITY: S++, // & and such. 
+ OPEN_WAKA: S++, // < + SGML_DECL: S++, // + SCRIPT: S++, // + +-------------------------------------------------------------------------------- + + + + + +## Table of Contents + +- [Examples](#examples) + - [Consuming a source map](#consuming-a-source-map) + - [Generating a source map](#generating-a-source-map) + - [With SourceNode (high level API)](#with-sourcenode-high-level-api) + - [With SourceMapGenerator (low level API)](#with-sourcemapgenerator-low-level-api) +- [API](#api) + - [SourceMapConsumer](#sourcemapconsumer) + - [new SourceMapConsumer(rawSourceMap)](#new-sourcemapconsumerrawsourcemap) + - [SourceMapConsumer.prototype.computeColumnSpans()](#sourcemapconsumerprototypecomputecolumnspans) + - [SourceMapConsumer.prototype.originalPositionFor(generatedPosition)](#sourcemapconsumerprototypeoriginalpositionforgeneratedposition) + - [SourceMapConsumer.prototype.generatedPositionFor(originalPosition)](#sourcemapconsumerprototypegeneratedpositionfororiginalposition) + - [SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)](#sourcemapconsumerprototypeallgeneratedpositionsfororiginalposition) + - [SourceMapConsumer.prototype.hasContentsOfAllSources()](#sourcemapconsumerprototypehascontentsofallsources) + - [SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])](#sourcemapconsumerprototypesourcecontentforsource-returnnullonmissing) + - [SourceMapConsumer.prototype.eachMapping(callback, context, order)](#sourcemapconsumerprototypeeachmappingcallback-context-order) + - [SourceMapGenerator](#sourcemapgenerator) + - [new SourceMapGenerator([startOfSourceMap])](#new-sourcemapgeneratorstartofsourcemap) + - [SourceMapGenerator.fromSourceMap(sourceMapConsumer)](#sourcemapgeneratorfromsourcemapsourcemapconsumer) + - [SourceMapGenerator.prototype.addMapping(mapping)](#sourcemapgeneratorprototypeaddmappingmapping) + - [SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)](#sourcemapgeneratorprototypesetsourcecontentsourcefile-sourcecontent) + - [SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])](#sourcemapgeneratorprototypeapplysourcemapsourcemapconsumer-sourcefile-sourcemappath) + - [SourceMapGenerator.prototype.toString()](#sourcemapgeneratorprototypetostring) + - [SourceNode](#sourcenode) + - [new SourceNode([line, column, source[, chunk[, name]]])](#new-sourcenodeline-column-source-chunk-name) + - [SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])](#sourcenodefromstringwithsourcemapcode-sourcemapconsumer-relativepath) + - [SourceNode.prototype.add(chunk)](#sourcenodeprototypeaddchunk) + - [SourceNode.prototype.prepend(chunk)](#sourcenodeprototypeprependchunk) + - [SourceNode.prototype.setSourceContent(sourceFile, sourceContent)](#sourcenodeprototypesetsourcecontentsourcefile-sourcecontent) + - [SourceNode.prototype.walk(fn)](#sourcenodeprototypewalkfn) + - [SourceNode.prototype.walkSourceContents(fn)](#sourcenodeprototypewalksourcecontentsfn) + - [SourceNode.prototype.join(sep)](#sourcenodeprototypejoinsep) + - [SourceNode.prototype.replaceRight(pattern, replacement)](#sourcenodeprototypereplacerightpattern-replacement) + - [SourceNode.prototype.toString()](#sourcenodeprototypetostring) + - [SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])](#sourcenodeprototypetostringwithsourcemapstartofsourcemap) + + + +## Examples + +### Consuming a source map + +```js +var rawSourceMap = { + version: 3, + file: 'min.js', + names: ['bar', 'baz', 'n'], + sources: 
['one.js', 'two.js'], + sourceRoot: 'http://example.com/www/js/', + mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA' +}; + +var smc = new SourceMapConsumer(rawSourceMap); + +console.log(smc.sources); +// [ 'http://example.com/www/js/one.js', +// 'http://example.com/www/js/two.js' ] + +console.log(smc.originalPositionFor({ + line: 2, + column: 28 +})); +// { source: 'http://example.com/www/js/two.js', +// line: 2, +// column: 10, +// name: 'n' } + +console.log(smc.generatedPositionFor({ + source: 'http://example.com/www/js/two.js', + line: 2, + column: 10 +})); +// { line: 2, column: 28 } + +smc.eachMapping(function (m) { + // ... +}); +``` + +### Generating a source map + +In depth guide: +[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/) + +#### With SourceNode (high level API) + +```js +function compile(ast) { + switch (ast.type) { + case 'BinaryExpression': + return new SourceNode( + ast.location.line, + ast.location.column, + ast.location.source, + [compile(ast.left), " + ", compile(ast.right)] + ); + case 'Literal': + return new SourceNode( + ast.location.line, + ast.location.column, + ast.location.source, + String(ast.value) + ); + // ... + default: + throw new Error("Bad AST"); + } +} + +var ast = parse("40 + 2", "add.js"); +console.log(compile(ast).toStringWithSourceMap({ + file: 'add.js' +})); +// { code: '40 + 2', +// map: [object SourceMapGenerator] } +``` + +#### With SourceMapGenerator (low level API) + +```js +var map = new SourceMapGenerator({ + file: "source-mapped.js" +}); + +map.addMapping({ + generated: { + line: 10, + column: 35 + }, + source: "foo.js", + original: { + line: 33, + column: 2 + }, + name: "christopher" +}); + +console.log(map.toString()); +// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}' +``` + +## API + +Get a reference to the module: + +```js +// Node.js +var sourceMap = require('source-map'); + +// Browser builds +var sourceMap = window.sourceMap; + +// Inside Firefox +const sourceMap = require("devtools/toolkit/sourcemap/source-map.js"); +``` + +### SourceMapConsumer + +A SourceMapConsumer instance represents a parsed source map which we can query +for information about the original file positions by giving it a file position +in the generated source. + +#### new SourceMapConsumer(rawSourceMap) + +The only parameter is the raw source map (either as a string which can be +`JSON.parse`'d, or an object). According to the spec, source maps have the +following attributes: + +* `version`: Which version of the source map spec this map is following. + +* `sources`: An array of URLs to the original source files. + +* `names`: An array of identifiers which can be referenced by individual + mappings. + +* `sourceRoot`: Optional. The URL root from which all sources are relative. + +* `sourcesContent`: Optional. An array of contents of the original source files. + +* `mappings`: A string of base64 VLQs which contain the actual mappings. + +* `file`: Optional. The generated filename this source map is associated with. + +```js +var consumer = new sourceMap.SourceMapConsumer(rawSourceMapJsonData); +``` + +#### SourceMapConsumer.prototype.computeColumnSpans() + +Compute the last column for each generated mapping. The last column is +inclusive. 
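+
+A typical call order (a minimal sketch, reusing `rawSourceMapJsonData` from the
+constructor example above) is to compute the spans once, right after building
+the consumer, and then query as usual; the before/after listing below shows the
+effect on `allGeneratedPositionsFor`:
+
+```js
+var consumer = new sourceMap.SourceMapConsumer(rawSourceMapJsonData);
+// One up-front pass; afterwards queried positions also carry `lastColumn`.
+consumer.computeColumnSpans();
+```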
+
+```js
+// Before:
+consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
+// [ { line: 2,
+//     column: 1 },
+//   { line: 2,
+//     column: 10 },
+//   { line: 2,
+//     column: 20 } ]
+
+consumer.computeColumnSpans();
+
+// After:
+consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
+// [ { line: 2,
+//     column: 1,
+//     lastColumn: 9 },
+//   { line: 2,
+//     column: 10,
+//     lastColumn: 19 },
+//   { line: 2,
+//     column: 20,
+//     lastColumn: Infinity } ]
+
+```
+
+#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
+
+Returns the original source, line, and column information for the generated
+source's line and column positions provided. The only argument is an object with
+the following properties:
+
+* `line`: The line number in the generated source.
+
+* `column`: The column number in the generated source.
+
+* `bias`: Either `SourceMapConsumer.GREATEST_LOWER_BOUND` or
+  `SourceMapConsumer.LEAST_UPPER_BOUND`. Specifies whether to return the closest
+  element that is smaller than or greater than the one we are searching for,
+  respectively, if the exact element cannot be found. Defaults to
+  `SourceMapConsumer.GREATEST_LOWER_BOUND`.
+
+and an object is returned with the following properties:
+
+* `source`: The original source file, or null if this information is not
+  available.
+
+* `line`: The line number in the original source, or null if this information is
+  not available.
+
+* `column`: The column number in the original source, or null if this
+  information is not available.
+
+* `name`: The original identifier, or null if this information is not available.
+
+```js
+consumer.originalPositionFor({ line: 2, column: 10 })
+// { source: 'foo.coffee',
+//   line: 2,
+//   column: 2,
+//   name: null }
+
+consumer.originalPositionFor({ line: 99999999999999999, column: 999999999999999 })
+// { source: null,
+//   line: null,
+//   column: null,
+//   name: null }
+```
+
+#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
+
+Returns the generated line and column information for the original source,
+line, and column positions provided. The only argument is an object with
+the following properties:
+
+* `source`: The filename of the original source.
+
+* `line`: The line number in the original source.
+
+* `column`: The column number in the original source.
+
+and an object is returned with the following properties:
+
+* `line`: The line number in the generated source, or null.
+
+* `column`: The column number in the generated source, or null.
+
+```js
+consumer.generatedPositionFor({ source: "example.js", line: 2, column: 10 })
+// { line: 1,
+//   column: 56 }
+```
+
+#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)
+
+Returns all generated line and column information for the original source, line,
+and column provided. If no column is provided, returns all mappings
+corresponding to either the line we are searching for or the next closest line
+that has any mappings. Otherwise, returns all mappings corresponding to the
+given line and either the column we are searching for or the next closest column
+that has any offsets.
+
+The only argument is an object with the following properties:
+
+* `source`: The filename of the original source.
+
+* `line`: The line number in the original source.
+
+* `column`: Optional. The column number in the original source.
+
+and an array of objects is returned, each with the following properties:
+
+* `line`: The line number in the generated source, or null.
+
+* `column`: The column number in the generated source, or null.
+
+```js
+consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
+// [ { line: 2,
+//     column: 1 },
+//   { line: 2,
+//     column: 10 },
+//   { line: 2,
+//     column: 20 } ]
+```
+
+#### SourceMapConsumer.prototype.hasContentsOfAllSources()
+
+Return true if we have the embedded source content for every source listed in
+the source map, false otherwise.
+
+In other words, if this method returns `true`, then
+`consumer.sourceContentFor(s)` will succeed for every source `s` in
+`consumer.sources`.
+
+```js
+// ...
+if (consumer.hasContentsOfAllSources()) {
+  consumerReadyCallback(consumer);
+} else {
+  fetchSources(consumer, consumerReadyCallback);
+}
+// ...
+```
+
+#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])
+
+Returns the original source content for the source provided. The only
+argument is the URL of the original source file.
+
+If the source content for the given source is not found, then an error is
+thrown. Optionally, pass `true` as the second param to have `null` returned
+instead.
+
+```js
+consumer.sources
+// [ "my-cool-lib.clj" ]
+
+consumer.sourceContentFor("my-cool-lib.clj")
+// "..."
+
+consumer.sourceContentFor("this is not in the source map");
+// Error: "this is not in the source map" is not in the source map
+
+consumer.sourceContentFor("this is not in the source map", true);
+// null
+```
+
+#### SourceMapConsumer.prototype.eachMapping(callback, context, order)
+
+Iterate over each mapping between an original source/line/column and a
+generated line/column in this source map.
+
+* `callback`: The function that is called with each mapping. Mappings have the
+  form `{ source, generatedLine, generatedColumn, originalLine, originalColumn,
+  name }`
+
+* `context`: Optional. If specified, this object will be the value of `this`
+  every time that `callback` is called.
+
+* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or
+  `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over
+  the mappings sorted by the generated file's line/column order or the
+  original's source/line/column order, respectively. Defaults to
+  `SourceMapConsumer.GENERATED_ORDER`.
+
+```js
+consumer.eachMapping(function (m) { console.log(m); })
+// ...
+// { source: 'illmatic.js',
+//   generatedLine: 1,
+//   generatedColumn: 0,
+//   originalLine: 1,
+//   originalColumn: 0,
+//   name: null }
+// { source: 'illmatic.js',
+//   generatedLine: 2,
+//   generatedColumn: 0,
+//   originalLine: 2,
+//   originalColumn: 0,
+//   name: null }
+// ...
+```
+
+### SourceMapGenerator
+
+An instance of the SourceMapGenerator represents a source map which is being
+built incrementally.
+
+#### new SourceMapGenerator([startOfSourceMap])
+
+You may pass an object with the following properties:
+
+* `file`: The filename of the generated source that this source map is
+  associated with.
+
+* `sourceRoot`: A root for all relative URLs in this source map.
+
+* `skipValidation`: Optional. When `true`, disables validation of mappings as
+  they are added. This can improve performance but should be used with
+  discretion, as a last resort. Even then, one should avoid using this flag when
+  running tests, if possible.
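+
+As a rough illustration of that trade-off (a sketch only; `bundle.min.js` is a
+made-up output name), validation might be skipped for a very large production
+build:
+
+```js
+var fastGenerator = new sourceMap.SourceMapGenerator({
+  file: "bundle.min.js",
+  skipValidation: true  // trades the per-mapping validation for speed
+});
+```
+
+The usual construction, with validation left on, looks like this: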
+ +```js +var generator = new sourceMap.SourceMapGenerator({ + file: "my-generated-javascript-file.js", + sourceRoot: "http://example.com/app/js/" +}); +``` + +#### SourceMapGenerator.fromSourceMap(sourceMapConsumer) + +Creates a new `SourceMapGenerator` from an existing `SourceMapConsumer` instance. + +* `sourceMapConsumer` The SourceMap. + +```js +var generator = sourceMap.SourceMapGenerator.fromSourceMap(consumer); +``` + +#### SourceMapGenerator.prototype.addMapping(mapping) + +Add a single mapping from original source line and column to the generated +source's line and column for this source map being created. The mapping object +should have the following properties: + +* `generated`: An object with the generated line and column positions. + +* `original`: An object with the original line and column positions. + +* `source`: The original source file (relative to the sourceRoot). + +* `name`: An optional original token name for this mapping. + +```js +generator.addMapping({ + source: "module-one.scm", + original: { line: 128, column: 0 }, + generated: { line: 3, column: 456 } +}) +``` + +#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent) + +Set the source content for an original source file. + +* `sourceFile` the URL of the original source file. + +* `sourceContent` the content of the source file. + +```js +generator.setSourceContent("module-one.scm", + fs.readFileSync("path/to/module-one.scm")) +``` + +#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]]) + +Applies a SourceMap for a source file to the SourceMap. +Each mapping to the supplied source file is rewritten using the +supplied SourceMap. Note: The resolution for the resulting mappings +is the minimum of this map and the supplied map. + +* `sourceMapConsumer`: The SourceMap to be applied. + +* `sourceFile`: Optional. The filename of the source file. + If omitted, sourceMapConsumer.file will be used, if it exists. + Otherwise an error will be thrown. + +* `sourceMapPath`: Optional. The dirname of the path to the SourceMap + to be applied. If relative, it is relative to the SourceMap. + + This parameter is needed when the two SourceMaps aren't in the same + directory, and the SourceMap to be applied contains relative source + paths. If so, those relative source paths need to be rewritten + relative to the SourceMap. + + If omitted, it is assumed that both SourceMaps are in the same directory, + thus not needing any rewriting. (Supplying `'.'` has the same effect.) + +#### SourceMapGenerator.prototype.toString() + +Renders the source map being generated to a string. + +```js +generator.toString() +// '{"version":3,"sources":["module-one.scm"],"names":[],"mappings":"...snip...","file":"my-generated-javascript-file.js","sourceRoot":"http://example.com/app/js/"}' +``` + +### SourceNode + +SourceNodes provide a way to abstract over interpolating and/or concatenating +snippets of generated JavaScript source code, while maintaining the line and +column information associated between those snippets and the original source +code. This is useful as the final intermediate representation a compiler might +use before outputting the generated JS and source map. + +#### new SourceNode([line, column, source[, chunk[, name]]]) + +* `line`: The original line number associated with this source node, or null if + it isn't associated with an original line. 
+
+* `column`: The original column number associated with this source node, or null
+  if it isn't associated with an original column.
+
+* `source`: The original source's filename; null if no filename is provided.
+
+* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see
+  below.
+
+* `name`: Optional. The original identifier.
+
+```js
+var node = new SourceNode(1, 2, "a.cpp", [
+  new SourceNode(3, 4, "b.cpp", "extern int status;\n"),
+  new SourceNode(5, 6, "c.cpp", "std::string* make_string(size_t n);\n"),
+  new SourceNode(7, 8, "d.cpp", "int main(int argc, char** argv) {}\n"),
+]);
+```
+
+#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])
+
+Creates a SourceNode from generated code and a SourceMapConsumer.
+
+* `code`: The generated code
+
+* `sourceMapConsumer` The SourceMap for the generated code
+
+* `relativePath` The optional path that relative sources in `sourceMapConsumer`
+  should be relative to.
+
+```js
+var consumer = new SourceMapConsumer(fs.readFileSync("path/to/my-file.js.map", "utf8"));
+var node = SourceNode.fromStringWithSourceMap(fs.readFileSync("path/to/my-file.js"),
+                                              consumer);
+```
+
+#### SourceNode.prototype.add(chunk)
+
+Add a chunk of generated JS to this source node.
+
+* `chunk`: A string snippet of generated JS code, another instance of
+  `SourceNode`, or an array where each member is one of those things.
+
+```js
+node.add(" + ");
+node.add(otherNode);
+node.add([leftHandOperandNode, " + ", rightHandOperandNode]);
+```
+
+#### SourceNode.prototype.prepend(chunk)
+
+Prepend a chunk of generated JS to this source node.
+
+* `chunk`: A string snippet of generated JS code, another instance of
+  `SourceNode`, or an array where each member is one of those things.
+
+```js
+node.prepend("/** Build Id: f783haef86324gf **/\n\n");
+```
+
+#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent)
+
+Set the source content for a source file. This will be added to the
+`SourceMap` in the `sourcesContent` field.
+
+* `sourceFile`: The filename of the source file
+
+* `sourceContent`: The content of the source file
+
+```js
+node.setSourceContent("module-one.scm",
+  fs.readFileSync("path/to/module-one.scm"))
+```
+
+#### SourceNode.prototype.walk(fn)
+
+Walk over the tree of JS snippets in this node and its children. The walking
+function is called once for each snippet of JS and is passed that snippet and
+its associated original source's line/column location.
+
+* `fn`: The traversal function.
+
+```js
+var node = new SourceNode(1, 2, "a.js", [
+  new SourceNode(3, 4, "b.js", "uno"),
+  "dos",
+  [
+    "tres",
+    new SourceNode(5, 6, "c.js", "quatro")
+  ]
+]);
+
+node.walk(function (code, loc) { console.log("WALK:", code, loc); })
+// WALK: uno { source: 'b.js', line: 3, column: 4, name: null }
+// WALK: dos { source: 'a.js', line: 1, column: 2, name: null }
+// WALK: tres { source: 'a.js', line: 1, column: 2, name: null }
+// WALK: quatro { source: 'c.js', line: 5, column: 6, name: null }
+```
+
+#### SourceNode.prototype.walkSourceContents(fn)
+
+Walk over the tree of SourceNodes. The walking function is called for each
+source file content and is passed the filename and source content.
+
+* `fn`: The traversal function.
+ +```js +var a = new SourceNode(1, 2, "a.js", "generated from a"); +a.setSourceContent("a.js", "original a"); +var b = new SourceNode(1, 2, "b.js", "generated from b"); +b.setSourceContent("b.js", "original b"); +var c = new SourceNode(1, 2, "c.js", "generated from c"); +c.setSourceContent("c.js", "original c"); + +var node = new SourceNode(null, null, null, [a, b, c]); +node.walkSourceContents(function (source, contents) { console.log("WALK:", source, ":", contents); }) +// WALK: a.js : original a +// WALK: b.js : original b +// WALK: c.js : original c +``` + +#### SourceNode.prototype.join(sep) + +Like `Array.prototype.join` except for SourceNodes. Inserts the separator +between each of this source node's children. + +* `sep`: The separator. + +```js +var lhs = new SourceNode(1, 2, "a.rs", "my_copy"); +var operand = new SourceNode(3, 4, "a.rs", "="); +var rhs = new SourceNode(5, 6, "a.rs", "orig.clone()"); + +var node = new SourceNode(null, null, null, [ lhs, operand, rhs ]); +var joinedNode = node.join(" "); +``` + +#### SourceNode.prototype.replaceRight(pattern, replacement) + +Call `String.prototype.replace` on the very right-most source snippet. Useful +for trimming white space from the end of a source node, etc. + +* `pattern`: The pattern to replace. + +* `replacement`: The thing to replace the pattern with. + +```js +// Trim trailing white space. +node.replaceRight(/\s*$/, ""); +``` + +#### SourceNode.prototype.toString() + +Return the string representation of this source node. Walks over the tree and +concatenates all the various snippets together to one string. + +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.toString() +// 'unodostresquatro' +``` + +#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap]) + +Returns the string representation of this tree of source nodes, plus a +SourceMapGenerator which contains all the mappings between the generated and +original sources. + +The arguments are the same as those to `new SourceMapGenerator`. 
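+
+The returned object has a `code` string and a `map` that is a
+`SourceMapGenerator`, as the listing below shows. One common way to wire the
+two together (a sketch; the file names and output paths are assumptions of this
+example, and `node` is the node built in the examples above) is to serialize
+the map and append the standard `sourceMappingURL` comment to the code:
+
+```js
+var result = node.toStringWithSourceMap({ file: "my-output-file.js" });
+var output = result.code + "\n//# sourceMappingURL=my-output-file.js.map\n";
+var mapJson = result.map.toString(); // JSON to write to my-output-file.js.map
+```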
+ +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.toStringWithSourceMap({ file: "my-output-file.js" }) +// { code: 'unodostresquatro', +// map: [object SourceMapGenerator] } +``` diff --git a/node_modules/source-map/dist/source-map.debug.js b/node_modules/source-map/dist/source-map.debug.js new file mode 100644 index 00000000..b5ab6382 --- /dev/null +++ b/node_modules/source-map/dist/source-map.debug.js @@ -0,0 +1,3091 @@ +(function webpackUniversalModuleDefinition(root, factory) { + if(typeof exports === 'object' && typeof module === 'object') + module.exports = factory(); + else if(typeof define === 'function' && define.amd) + define([], factory); + else if(typeof exports === 'object') + exports["sourceMap"] = factory(); + else + root["sourceMap"] = factory(); +})(this, function() { +return /******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) +/******/ return installedModules[moduleId].exports; +/******/ +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ exports: {}, +/******/ id: moduleId, +/******/ loaded: false +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.loaded = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; +/******/ +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; +/******/ +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; +/******/ +/******/ // Load entry module and return exports +/******/ return __webpack_require__(0); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ (function(module, exports, __webpack_require__) { + + /* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ + exports.SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + exports.SourceMapConsumer = __webpack_require__(7).SourceMapConsumer; + exports.SourceNode = __webpack_require__(10).SourceNode; + + +/***/ }), +/* 1 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var base64VLQ = __webpack_require__(2); + var util = __webpack_require__(4); + var ArraySet = __webpack_require__(5).ArraySet; + var MappingList = __webpack_require__(6).MappingList; + + /** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. 
+ * - sourceRoot: A root for all relative URLs in this source map. + */ + function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; + } + + SourceMapGenerator.prototype._version = 3; + + /** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. + */ + SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + }); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + + /** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). + * - name: An optional original token name for this mapping. + */ + SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + + /** + * Set the source content for a source file. + */ + SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. 
+ if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. + delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + + /** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ + SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. + var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. 
+ aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + + /** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. + * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ + SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + // When aOriginal is truthy but has empty values for .line and .column, + // it is most likely a programmer error. In this case we throw a very + // specific error message to try to guide them the right way. + // For example: https://github.com/Polymer/polymer-bundler/pull/519 + if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') { + throw new Error( + 'original.line and original.column are not numbers -- you probably meant to omit ' + + 'the original mapping entirely and only map the generated position. If so, pass ' + + 'null for the original mapping instead of an object with empty or null values.' + ); + } + + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. + return; + } + else { + throw new Error('Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + }; + + /** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. 
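+   *
+   * For instance, a mappings string such as "AAAA,IAAM;EACA" (an illustrative
+   * value, not data from this generator) holds three delta-encoded segments:
+   * "," separates segments within one generated line and ";" starts the next
+   * generated line.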
+ */ + SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + + SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + + /** + * Externalize the source map. + */ + SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + + /** + * Render the source map being generated to a string. + */ + SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + + exports.SourceMapGenerator = SourceMapGenerator; + + +/***/ }), +/* 2 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. 
+ * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + + var base64 = __webpack_require__(3); + + // A single base 64 digit can contain 6 bits of data. For the base 64 variable + // length quantities we use in the source map spec, the first bit is the sign, + // the next four bits are the actual value, and the 6th bit is the + // continuation bit. The continuation bit tells us whether there are more + // digits in this value following this digit. + // + // Continuation + // | Sign + // | | + // V V + // 101011 + + var VLQ_BASE_SHIFT = 5; + + // binary: 100000 + var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + + // binary: 011111 + var VLQ_BASE_MASK = VLQ_BASE - 1; + + // binary: 100000 + var VLQ_CONTINUATION_BIT = VLQ_BASE; + + /** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ + function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; + } + + /** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ + function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; + } + + /** + * Returns the base 64 VLQ encoded value. + */ + exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. 
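+        // Illustrative trace: encode(16) starts from toVLQSigned(16) === 32;
+        // the first pass emits digit 0 with the continuation bit set ('g'),
+        // the second emits digit 1 with no continuation ('B'), giving "gB".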
+ digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; + }; + + /** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. + */ + exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; + }; + + +/***/ }), +/* 3 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + + /** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ + exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); + }; + + /** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. + */ + exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. + return -1; + }; + + +/***/ }), +/* 4 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + /** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. 
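+   *
+   * For example (illustrative): getArg({ file: 'a.js' }, 'file') returns
+   * 'a.js', getArg({}, 'file', null) returns null, and getArg({}, 'file')
+   * throws because no default was supplied.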
+ */ + function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } + } + exports.getArg = getArg; + + var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/; + var dataUrlRegexp = /^data:.+\,.+$/; + + function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; + } + exports.urlParse = urlParse; + + function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; + } + exports.urlGenerate = urlGenerate; + + /** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consecutive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ + function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + + var parts = path.split(/\/+/); + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; + } + exports.normalize = normalize; + + /** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. 
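+   *
+   * A few concrete cases (illustrative): join('a/b', 'c') yields 'a/b/c',
+   * join('http://example.com/js/', 'one.js') yields
+   * 'http://example.com/js/one.js', and join('a/b', '/c') yields '/c'.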
+ */ + function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; + } + exports.join = join; + + exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp); + }; + + /** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ + function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); + } + exports.relative = relative; + + var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); + }()); + + function identity (s) { + return s; + } + + /** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. + * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ + function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; + } + exports.toSetString = supportsNullProto ? identity : toSetString; + + function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; + } + exports.fromSetString = supportsNullProto ? 
identity : fromSetString; + + function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; + } + + /** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ + function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; + } + exports.compareByOriginalPositions = compareByOriginalPositions; + + /** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. + */ + function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; + } + exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + + function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; + } + + /** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. 
+ */ + function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); + } + exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; + + +/***/ }), +/* 5 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var has = Object.prototype.hasOwnProperty; + var hasNativeMap = typeof Map !== "undefined"; + + /** + * A data structure which is a combination of an array and a set. Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ + function ArraySet() { + this._array = []; + this._set = hasNativeMap ? new Map() : Object.create(null); + } + + /** + * Static method for creating ArraySet instances from an existing array. + */ + ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; + }; + + /** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ + ArraySet.prototype.size = function ArraySet_size() { + return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length; + }; + + /** + * Add the given string to this set. + * + * @param String aStr + */ + ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = hasNativeMap ? aStr : util.toSetString(aStr); + var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + if (hasNativeMap) { + this._set.set(aStr, idx); + } else { + this._set[sStr] = idx; + } + } + }; + + /** + * Is the given string a member of this set? + * + * @param String aStr + */ + ArraySet.prototype.has = function ArraySet_has(aStr) { + if (hasNativeMap) { + return this._set.has(aStr); + } else { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); + } + }; + + /** + * What is the index of the given string in the array? + * + * @param String aStr + */ + ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + if (hasNativeMap) { + var idx = this._set.get(aStr); + if (idx >= 0) { + return idx; + } + } else { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + } + + throw new Error('"' + aStr + '" is not in the set.'); + }; + + /** + * What is the element at the given index? 
+ * + * @param Number aIdx + */ + ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); + }; + + /** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ + ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); + }; + + exports.ArraySet = ArraySet; + + +/***/ }), +/* 6 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + + /** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ + function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; + } + + /** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ + function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; + } + + /** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ + MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + + /** + * Add the given source mapping. + * + * @param Object aMapping + */ + MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } + }; + + /** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. + */ + MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; + }; + + exports.MappingList = MappingList; + + +/***/ }), +/* 7 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var binarySearch = __webpack_require__(8); + var ArraySet = __webpack_require__(5).ArraySet; + var base64VLQ = __webpack_require__(2); + var quickSort = __webpack_require__(9).quickSort; + + function SourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + return sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap) + : new BasicSourceMapConsumer(sourceMap); + } + + SourceMapConsumer.fromSourceMap = function(aSourceMap) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap); + } + + /** + * The version of the source mapping spec that we are consuming. + */ + SourceMapConsumer.prototype._version = 3; + + // `__generatedMappings` and `__originalMappings` are arrays that hold the + // parsed mapping coordinates from the source map's "mappings" attribute. They + // are lazily instantiated, accessed via the `_generatedMappings` and + // `_originalMappings` getters respectively, and we only parse the mappings + // and create these arrays once queried for a source location. We jump through + // these hoops because there can be many thousands of mappings, and parsing + // them is expensive, so we only want to do it if we must. + // + // Each object in the arrays is of the form: + // + // { + // generatedLine: The line number in the generated code, + // generatedColumn: The column number in the generated code, + // source: The path to the original source file that generated this + // chunk of code, + // originalLine: The line number in the original source that + // corresponds to this chunk of generated code, + // originalColumn: The column number in the original source that + // corresponds to this chunk of generated code, + // name: The name of the original symbol which generated this chunk of + // code. + // } + // + // All properties except for `generatedLine` and `generatedColumn` can be + // `null`. + // + // `_generatedMappings` is ordered by the generated positions. + // + // `_originalMappings` is ordered by the original positions. + + SourceMapConsumer.prototype.__generatedMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } + }); + + SourceMapConsumer.prototype.__originalMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } + }); + + SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
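+   *
+   * Editorial note: this base implementation only states the contract; the
+   * concrete parsers are BasicSourceMapConsumer.prototype._parseMappings and
+   * IndexedSourceMapConsumer.prototype._parseMappings, and parsing is
+   * triggered lazily the first time `_generatedMappings` or
+   * `_originalMappings` is read.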
+ */ + SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + + SourceMapConsumer.GENERATED_ORDER = 1; + SourceMapConsumer.ORIGINAL_ORDER = 2; + + SourceMapConsumer.GREATEST_LOWER_BOUND = 1; + SourceMapConsumer.LEAST_UPPER_BOUND = 2; + + /** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. + */ + SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + mappings.map(function (mapping) { + var source = mapping.source === null ? null : this._sources.at(mapping.source); + if (source != null && sourceRoot != null) { + source = util.join(sourceRoot, source); + } + return { + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : this._names.at(mapping.name) + }; + }, this).forEach(aCallback, context); + }; + + /** + * Returns all generated line and column information for the original source, + * line, and column provided. If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: Optional. the column number in the original source. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ + SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. 
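+      //
+      // Illustrative call (editorial note; the file name and positions are
+      // hypothetical):
+      //
+      //   consumer.allGeneratedPositionsFor({ source: 'foo.js', line: 2 })
+      //   // => e.g. [{ line: 13, column: 0, lastColumn: 7 }, ...]
+      //
+      // `lastColumn` is only populated once `computeColumnSpans()` has been
+      // called on the consumer; otherwise it is null.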
+ var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + if (this.sourceRoot != null) { + needle.source = util.relative(this.sourceRoot, needle.source); + } + if (!this._sources.has(needle.source)) { + return []; + } + needle.source = this._sources.indexOf(needle.source); + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. + while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + + exports.SourceMapConsumer = SourceMapConsumer; + + /** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The only parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. 
+ * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ + function BasicSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this.file = file; + } + + BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + + /** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. 
+ * @returns BasicSourceMapConsumer + */ + BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + + /** + * The version of the source mapping spec that we are consuming. + */ + BasicSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._sources.toArray().map(function (s) { + return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s; + }, this); + } + }); + + /** + * Provide the JIT with a nice shape / hidden class. + */ + function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; + } + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + // Because each offset is encoded relative to the previous one, + // many segments often have the same encoding. 
We can exploit this + // fact by caching the parsed variable length fields of each segment, + // allowing us to avoid a second parse if we encounter the same + // segment again. + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = cachedSegments[str]; + if (segment) { + index += str.length; + } else { + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + cachedSegments[str] = segment; + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. + mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + originalMappings.push(mapping); + } + } + } + + quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); + this.__generatedMappings = generatedMappings; + + quickSort(originalMappings, util.compareByOriginalPositions); + this.__originalMappings = originalMappings; + }; + + /** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ + BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. + + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + + /** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ + BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated columnt. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). 
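+        //
+        // Editorial illustration (hypothetical values): if a line has
+        // mappings starting at generated columns 0 and 5, the first mapping
+        // gets lastGeneratedColumn = 4; the final mapping on each line gets
+        // Infinity, i.e. it is assumed to span to the end of the line.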
+ if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. + */ + BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + if (this.sourceRoot != null) { + source = util.join(this.sourceRoot, source); + } + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + + /** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. 
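+   *
+   * Illustrative sketch (editorial; file names are hypothetical):
+   *
+   *   consumer.sourceContentFor('foo.js');            // contents of foo.js
+   *   consumer.sourceContentFor('missing.js', true);  // null rather than throwing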
+ */ + BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + if (this.sourceRoot != null) { + aSource = util.relative(this.sourceRoot, aSource); + } + + if (this._sources.has(aSource)) { + return this.sourcesContent[this._sources.indexOf(aSource)]; + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = aSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + aSource)) { + return this.sourcesContent[this._sources.indexOf("/" + aSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: The column number in the original source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. 
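+   *
+   * Illustrative sketch (editorial; the positions shown are hypothetical):
+   *
+   *   consumer.generatedPositionFor({ source: 'foo.js', line: 2, column: 10 })
+   *   // => e.g. { line: 13, column: 4, lastColumn: null }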
+ */ + BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + if (this.sourceRoot != null) { + source = util.relative(this.sourceRoot, source); + } + if (!this._sources.has(source)) { + return { + line: null, + column: null, + lastColumn: null + }; + } + source = this._sources.indexOf(source); + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + + exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + + /** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The only parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. + * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ + function IndexedSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. 
+ // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map')) + } + }); + } + + IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + + /** + * The version of the source mapping spec that we are consuming. + */ + IndexedSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } + }); + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. + */ + IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. + var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + + /** + * Returns the original source content. 
The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ + IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: The column number in the original source. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ + IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + if (section.consumer.sourceRoot !== null) { + source = util.join(section.consumer.sourceRoot, source); + } + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? 
section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + + exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; + + +/***/ }), +/* 8 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + exports.GREATEST_LOWER_BOUND = 1; + exports.LEAST_UPPER_BOUND = 2; + + /** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ + function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. + return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } + } + + /** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. 
+ * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ + exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. + while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; + }; + + +/***/ }), +/* 9 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + // It turns out that some (most?) JavaScript engines don't self-host + // `Array.prototype.sort`. This makes sense because C++ will likely remain + // faster than JS when doing raw CPU-intensive sorting. However, when using a + // custom comparator function, calling back and forth between the VM's C++ and + // JIT'd JS is rather slow *and* loses JIT type information, resulting in + // worse generated code for the comparator function than would be optimal. In + // fact, when sorting with a comparator, these costs outweigh the benefits of + // sorting in C++. By using our own JS-implemented Quick Sort (below), we get + // a ~3500ms mean speed-up in `bench/bench.html`. + + /** + * Swap the elements indexed by `x` and `y` in the array `ary`. + * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. + */ + function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; + } + + /** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ + function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); + } + + /** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ + function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. 
+ // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. + for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } + } + + /** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + */ + exports.quickSort = function (ary, comparator) { + doQuickSort(ary, comparator, 0, ary.length - 1); + }; + + +/***/ }), +/* 10 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + var util = __webpack_require__(4); + + // Matches a Windows-style `\r\n` newline or a `\n` newline used by all other + // operating systems these days (capturing the result). + var REGEX_NEWLINE = /(\r?\n)/; + + // Newline character code for charCodeAt() comparisons + var NEWLINE_CODE = 10; + + // Private symbol for identifying `SourceNode`s when multiple versions of + // the source-map library are loaded. This MUST NOT CHANGE across + // versions! + var isSourceNode = "$$$isSourceNode$$$"; + + /** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ + function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); + } + + /** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. 
The path that relative sources in the + * SourceMapConsumer should be relative to. + */ + SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are accessed by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var remainingLinesIndex = 0; + var shiftNextLine = function() { + var lineContents = getNextLine(); + // The last line of a file might not have a newline. + var newLine = getNextLine() || ""; + return lineContents + newLine; + + function getNextLine() { + return remainingLinesIndex < remainingLines.length ? + remainingLines[remainingLinesIndex++] : undefined; + } + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. + // To extract it current and last mapping is used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. + if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. + // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[remainingLinesIndex]; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[remainingLinesIndex]; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. 
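+      // Any generated text after the final mapping is flushed below: the
+      // remainder of the current line is still attributed to `lastMapping`,
+      // and the remaining unmapped lines are appended as plain strings.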
+ if (remainingLinesIndex < remainingLines.length) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.splice(remainingLinesIndex).join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + + /** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + }; + + /** + * Add a chunk of generated JS to the beginning of this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + }; + + /** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } + }; + + /** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. 
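+   *
+   * Illustrative note (editorial): `node.join('\n')` interleaves a newline
+   * string between every pair of existing children and returns the node
+   * itself, so calls can be chained.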
+ */ + SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; + }; + + /** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ + SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; + }; + + /** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ + SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + + /** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + + /** + * Return the string representation of this source node. Walks over the tree + * and concatenates all the various snippets together to one string. + */ + SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; + }; + + /** + * Returns the string representation of this source node along with a source + * map. 
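+   *
+   * Illustrative sketch (editorial; the output file name is hypothetical):
+   *
+   *   var result = node.toStringWithSourceMap({ file: 'out.js' });
+   *   // result.code is the concatenated source text; result.map is a
+   *   // SourceMapGenerator, so result.map.toString() yields the serialized
+   *   // source map JSON.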
+ */ + SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; + }; + + exports.SourceNode = SourceNode; + + +/***/ }) +/******/ ]) +}); +; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"sources":["webpack:///webpack/universalModuleDefinition","webpack:///webpack/bootstrap 
e4738fc72a7b23039889","webpack:///./source-map.js","webpack:///./lib/source-map-generator.js","webpack:///./lib/base64-vlq.js","webpack:///./lib/base64.js","webpack:///./lib/util.js","webpack:///./lib/array-set.js","webpack:///./lib/mapping-list.js","webpack:///./lib/source-map-consumer.js","webpack:///./lib/binary-search.js","webpack:///./lib/quick-sort.js","webpack:///./lib/source-node.js"],"names":[],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD,O;ACVA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA,uBAAe;AACf;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;;AAGA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACPA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA,MAAK;AACL;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,2CAA0C,SAAS;AACnD;AACA;;AAEA;AACA;AACA;AACA,qBAAoB;AACpB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;AC/ZA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,4DAA2D;AAC3D,qBAAoB;AACpB;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;;AAEH;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,IAAG;;AAEH;AACA;AACA;;;;;;;AC3IA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,iBAAgB;AAChB,iBAAgB;;AAEhB,oBAAmB;A
ACnB,qBAAoB;;AAEpB,iBAAgB;AAChB,iBAAgB;;AAEhB,iBAAgB;AAChB,kBAAiB;;AAEjB;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;;;;;;AClEA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA,IAAG;AACH;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,+CAA8C,QAAQ;AACtD;AACA;AACA;AACA,MAAK;AACL;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,EAAC;;AAED;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,4BAA2B,QAAQ;AACnC;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;;;;;;AChaA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,uCAAsC,SAAS;AAC/C;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;ACxHA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAgB;AAChB;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;AC9EA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,uDAAsD;AACtD;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,EAAC;;AAED;AACA;AACA;AACA;AACA;AA
CA;;AAEA;AACA;AACA,EAAC;;AAED;AACA;AACA;AACA,oBAAmB;AACnB;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAW;;AAEX;AACA;AACA,QAAO;AACP;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAW;;AAEX;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,4BAA2B,MAAM;AACjC;AACA;AACA;AACA;AACA;AACA;AACA;AACA,uDAAsD;AACtD;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;;AAEL;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA,uDAAsD,YAAY;AAClE;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA,EAAC;;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,oCAAmC;AACnC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,0BAAyB,cAAc;AACvC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,wBAAuB,wCAAwC;AAC/D;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,gDAA+C,mBAAmB,EAAE;AACpE;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,kBAAiB,oBAAoB;AACrC;AACA;AACA;AACA;AACA;AACA,8BAA6B,MAAM;AACnC;AACA,QAAO;AACP;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA,uDAAsD;AACtD;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA,IAAG;AACH;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C,sBAAqB,+CAA+C;AACpE;AACA;AACA;AACA;AACA;AACA,EAAC;;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,QAAO;AACP;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C;AACA;AACA,sBAAqB,4BAA4B;AACjD;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;;;;;;ACzjCA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;;;;;;AC9GA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,MAAM;AACjB;AACA,YAAW,OAAO;AAClB;AACA,YAAW,OAAO;AAClB;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,OAAO;AAClB;AACA,YAAW,OAAO;AAClB;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,MAAM;AACjB;AACA,YAAW,SAAS;AACpB;AACA,YAAW,OAAO;AAClB;AACA,YAAW,OAAO;AAClB;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,OAAO;AAC1B;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,MAAM;AACjB;AACA,YAAW,SAAS;AACpB;AACA;AACA;AACA;AACA;;;;;;;ACjHA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;;AAEL;;AAEA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,kCAAiC,QAAQ;AACzC;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,8CAA6C,SAAS;AACtD;AACA;AACA;AACA;AACA;AACA;AACA,qBAAoB;AACpB;AACA;AACA,uCAAsC;AACtC;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,gBAAe,WAAW;AAC1B;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,gDAA+C,SAAS;AACxD;AACA;AACA;AACA;;AAEA;AACA,0CAAyC,SAAS;AAClD;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAW;AACX;AACA;AACA;AACA,YAAW;AACX;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA;AACA,6CAA4C,cAAc;AAC1D;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA,cAAa;AACb;AACA;AACA;AACA,cAAa;AACb;AACA,YAAW;AACX;AACA,QAAO;AACP;AACA;AACA;AACA,IAAG;AACH;AACA;AACA,IAAG;;AAEH,WAAU;AACV;;AAEA","file":"source-map.debug.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"sourceMap\"] = factory();\n\telse\n\t\troot[\"sourceMap\"] = factory();\n})(this, function() {\nreturn \n\n\n// WEBPACK FOOTER //\n// webpack/universalModuleDefinition"," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId])\n \t\t\treturn installedModules[moduleId].exports;\n\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\texports: {},\n \t\t\tid: moduleId,\n \t\t\tloaded: false\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.loaded = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(0);\n\n\n\n// WEBPACK FOOTER //\n// webpack/bootstrap e4738fc72a7b23039889","/*\n * Copyright 2009-2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE.txt or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\nexports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator;\nexports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer;\nexports.SourceNode = require('./lib/source-node').SourceNode;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./source-map.js\n// module id = 0\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. 
See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar base64VLQ = require('./base64-vlq');\nvar util = require('./util');\nvar ArraySet = require('./array-set').ArraySet;\nvar MappingList = require('./mapping-list').MappingList;\n\n/**\n * An instance of the SourceMapGenerator represents a source map which is\n * being built incrementally. You may pass an object with the following\n * properties:\n *\n *   - file: The filename of the generated source.\n *   - sourceRoot: A root for all relative URLs in this source map.\n */\nfunction SourceMapGenerator(aArgs) {\n  if (!aArgs) {\n    aArgs = {};\n  }\n  this._file = util.getArg(aArgs, 'file', null);\n  this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);\n  this._skipValidation = util.getArg(aArgs, 'skipValidation', false);\n  this._sources = new ArraySet();\n  this._names = new ArraySet();\n  this._mappings = new MappingList();\n  this._sourcesContents = null;\n}\n\nSourceMapGenerator.prototype._version = 3;\n\n/**\n * Creates a new SourceMapGenerator based on a SourceMapConsumer\n *\n * @param aSourceMapConsumer The SourceMap.\n */\nSourceMapGenerator.fromSourceMap =\n  function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {\n    var sourceRoot = aSourceMapConsumer.sourceRoot;\n    var generator = new SourceMapGenerator({\n      file: aSourceMapConsumer.file,\n      sourceRoot: sourceRoot\n    });\n    aSourceMapConsumer.eachMapping(function (mapping) {\n      var newMapping = {\n        generated: {\n          line: mapping.generatedLine,\n          column: mapping.generatedColumn\n        }\n      };\n\n      if (mapping.source != null) {\n        newMapping.source = mapping.source;\n        if (sourceRoot != null) {\n          newMapping.source = util.relative(sourceRoot, newMapping.source);\n        }\n\n        newMapping.original = {\n          line: mapping.originalLine,\n          column: mapping.originalColumn\n        };\n\n        if (mapping.name != null) {\n          newMapping.name = mapping.name;\n        }\n      }\n\n      generator.addMapping(newMapping);\n    });\n    aSourceMapConsumer.sources.forEach(function (sourceFile) {\n      var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n      if (content != null) {\n        generator.setSourceContent(sourceFile, content);\n      }\n    });\n    return generator;\n  };\n\n/**\n * Add a single mapping from original source line and column to the generated\n * source's line and column for this source map being created. 
The mapping\n * object should have the following properties:\n *\n *   - generated: An object with the generated line and column positions.\n *   - original: An object with the original line and column positions.\n *   - source: The original source file (relative to the sourceRoot).\n *   - name: An optional original token name for this mapping.\n */\nSourceMapGenerator.prototype.addMapping =\n  function SourceMapGenerator_addMapping(aArgs) {\n    var generated = util.getArg(aArgs, 'generated');\n    var original = util.getArg(aArgs, 'original', null);\n    var source = util.getArg(aArgs, 'source', null);\n    var name = util.getArg(aArgs, 'name', null);\n\n    if (!this._skipValidation) {\n      this._validateMapping(generated, original, source, name);\n    }\n\n    if (source != null) {\n      source = String(source);\n      if (!this._sources.has(source)) {\n        this._sources.add(source);\n      }\n    }\n\n    if (name != null) {\n      name = String(name);\n      if (!this._names.has(name)) {\n        this._names.add(name);\n      }\n    }\n\n    this._mappings.add({\n      generatedLine: generated.line,\n      generatedColumn: generated.column,\n      originalLine: original != null && original.line,\n      originalColumn: original != null && original.column,\n      source: source,\n      name: name\n    });\n  };\n\n/**\n * Set the source content for a source file.\n */\nSourceMapGenerator.prototype.setSourceContent =\n  function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {\n    var source = aSourceFile;\n    if (this._sourceRoot != null) {\n      source = util.relative(this._sourceRoot, source);\n    }\n\n    if (aSourceContent != null) {\n      // Add the source content to the _sourcesContents map.\n      // Create a new _sourcesContents map if the property is null.\n      if (!this._sourcesContents) {\n        this._sourcesContents = Object.create(null);\n      }\n      this._sourcesContents[util.toSetString(source)] = aSourceContent;\n    } else if (this._sourcesContents) {\n      // Remove the source file from the _sourcesContents map.\n      // If the _sourcesContents map is empty, set the property to null.\n      delete this._sourcesContents[util.toSetString(source)];\n      if (Object.keys(this._sourcesContents).length === 0) {\n        this._sourcesContents = null;\n      }\n    }\n  };\n\n/**\n * Applies the mappings of a sub-source-map for a specific source file to the\n * source map being generated. Each mapping to the supplied source file is\n * rewritten using the supplied source map. Note: The resolution for the\n * resulting mappings is the minimium of this map and the supplied map.\n *\n * @param aSourceMapConsumer The source map to be applied.\n * @param aSourceFile Optional. The filename of the source file.\n *        If omitted, SourceMapConsumer's file property will be used.\n * @param aSourceMapPath Optional. The dirname of the path to the source map\n *        to be applied. If relative, it is relative to the SourceMapConsumer.\n *        This parameter is needed when the two source maps aren't in the same\n *        directory, and the source map to be applied contains relative source\n *        paths. 
If so, those relative source paths need to be rewritten\n *        relative to the SourceMapGenerator.\n */\nSourceMapGenerator.prototype.applySourceMap =\n  function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {\n    var sourceFile = aSourceFile;\n    // If aSourceFile is omitted, we will use the file property of the SourceMap\n    if (aSourceFile == null) {\n      if (aSourceMapConsumer.file == null) {\n        throw new Error(\n          'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +\n          'or the source map\\'s \"file\" property. Both were omitted.'\n        );\n      }\n      sourceFile = aSourceMapConsumer.file;\n    }\n    var sourceRoot = this._sourceRoot;\n    // Make \"sourceFile\" relative if an absolute Url is passed.\n    if (sourceRoot != null) {\n      sourceFile = util.relative(sourceRoot, sourceFile);\n    }\n    // Applying the SourceMap can add and remove items from the sources and\n    // the names array.\n    var newSources = new ArraySet();\n    var newNames = new ArraySet();\n\n    // Find mappings for the \"sourceFile\"\n    this._mappings.unsortedForEach(function (mapping) {\n      if (mapping.source === sourceFile && mapping.originalLine != null) {\n        // Check if it can be mapped by the source map, then update the mapping.\n        var original = aSourceMapConsumer.originalPositionFor({\n          line: mapping.originalLine,\n          column: mapping.originalColumn\n        });\n        if (original.source != null) {\n          // Copy mapping\n          mapping.source = original.source;\n          if (aSourceMapPath != null) {\n            mapping.source = util.join(aSourceMapPath, mapping.source)\n          }\n          if (sourceRoot != null) {\n            mapping.source = util.relative(sourceRoot, mapping.source);\n          }\n          mapping.originalLine = original.line;\n          mapping.originalColumn = original.column;\n          if (original.name != null) {\n            mapping.name = original.name;\n          }\n        }\n      }\n\n      var source = mapping.source;\n      if (source != null && !newSources.has(source)) {\n        newSources.add(source);\n      }\n\n      var name = mapping.name;\n      if (name != null && !newNames.has(name)) {\n        newNames.add(name);\n      }\n\n    }, this);\n    this._sources = newSources;\n    this._names = newNames;\n\n    // Copy sourcesContents of applied map.\n    aSourceMapConsumer.sources.forEach(function (sourceFile) {\n      var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n      if (content != null) {\n        if (aSourceMapPath != null) {\n          sourceFile = util.join(aSourceMapPath, sourceFile);\n        }\n        if (sourceRoot != null) {\n          sourceFile = util.relative(sourceRoot, sourceFile);\n        }\n        this.setSourceContent(sourceFile, content);\n      }\n    }, this);\n  };\n\n/**\n * A mapping can have one of the three levels of data:\n *\n *   1. Just the generated position.\n *   2. The Generated position, original position, and original source.\n *   3. 
Generated and original position, original source, as well as a name\n *      token.\n *\n * To maintain consistency, we validate that any new mapping being added falls\n * in to one of these categories.\n */\nSourceMapGenerator.prototype._validateMapping =\n  function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,\n                                              aName) {\n    // When aOriginal is truthy but has empty values for .line and .column,\n    // it is most likely a programmer error. In this case we throw a very\n    // specific error message to try to guide them the right way.\n    // For example: https://github.com/Polymer/polymer-bundler/pull/519\n    if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') {\n        throw new Error(\n            'original.line and original.column are not numbers -- you probably meant to omit ' +\n            'the original mapping entirely and only map the generated position. If so, pass ' +\n            'null for the original mapping instead of an object with empty or null values.'\n        );\n    }\n\n    if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n        && aGenerated.line > 0 && aGenerated.column >= 0\n        && !aOriginal && !aSource && !aName) {\n      // Case 1.\n      return;\n    }\n    else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n             && aOriginal && 'line' in aOriginal && 'column' in aOriginal\n             && aGenerated.line > 0 && aGenerated.column >= 0\n             && aOriginal.line > 0 && aOriginal.column >= 0\n             && aSource) {\n      // Cases 2 and 3.\n      return;\n    }\n    else {\n      throw new Error('Invalid mapping: ' + JSON.stringify({\n        generated: aGenerated,\n        source: aSource,\n        original: aOriginal,\n        name: aName\n      }));\n    }\n  };\n\n/**\n * Serialize the accumulated mappings in to the stream of base 64 VLQs\n * specified by the source map format.\n */\nSourceMapGenerator.prototype._serializeMappings =\n  function SourceMapGenerator_serializeMappings() {\n    var previousGeneratedColumn = 0;\n    var previousGeneratedLine = 1;\n    var previousOriginalColumn = 0;\n    var previousOriginalLine = 0;\n    var previousName = 0;\n    var previousSource = 0;\n    var result = '';\n    var next;\n    var mapping;\n    var nameIdx;\n    var sourceIdx;\n\n    var mappings = this._mappings.toArray();\n    for (var i = 0, len = mappings.length; i < len; i++) {\n      mapping = mappings[i];\n      next = ''\n\n      if (mapping.generatedLine !== previousGeneratedLine) {\n        previousGeneratedColumn = 0;\n        while (mapping.generatedLine !== previousGeneratedLine) {\n          next += ';';\n          previousGeneratedLine++;\n        }\n      }\n      else {\n        if (i > 0) {\n          if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {\n            continue;\n          }\n          next += ',';\n        }\n      }\n\n      next += base64VLQ.encode(mapping.generatedColumn\n                                 - previousGeneratedColumn);\n      previousGeneratedColumn = mapping.generatedColumn;\n\n      if (mapping.source != null) {\n        sourceIdx = this._sources.indexOf(mapping.source);\n        next += base64VLQ.encode(sourceIdx - previousSource);\n        previousSource = sourceIdx;\n\n        // lines are stored 0-based in SourceMap spec version 3\n        next += base64VLQ.encode(mapping.originalLine - 1\n                              
     - previousOriginalLine);\n        previousOriginalLine = mapping.originalLine - 1;\n\n        next += base64VLQ.encode(mapping.originalColumn\n                                   - previousOriginalColumn);\n        previousOriginalColumn = mapping.originalColumn;\n\n        if (mapping.name != null) {\n          nameIdx = this._names.indexOf(mapping.name);\n          next += base64VLQ.encode(nameIdx - previousName);\n          previousName = nameIdx;\n        }\n      }\n\n      result += next;\n    }\n\n    return result;\n  };\n\nSourceMapGenerator.prototype._generateSourcesContent =\n  function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {\n    return aSources.map(function (source) {\n      if (!this._sourcesContents) {\n        return null;\n      }\n      if (aSourceRoot != null) {\n        source = util.relative(aSourceRoot, source);\n      }\n      var key = util.toSetString(source);\n      return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)\n        ? this._sourcesContents[key]\n        : null;\n    }, this);\n  };\n\n/**\n * Externalize the source map.\n */\nSourceMapGenerator.prototype.toJSON =\n  function SourceMapGenerator_toJSON() {\n    var map = {\n      version: this._version,\n      sources: this._sources.toArray(),\n      names: this._names.toArray(),\n      mappings: this._serializeMappings()\n    };\n    if (this._file != null) {\n      map.file = this._file;\n    }\n    if (this._sourceRoot != null) {\n      map.sourceRoot = this._sourceRoot;\n    }\n    if (this._sourcesContents) {\n      map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);\n    }\n\n    return map;\n  };\n\n/**\n * Render the source map being generated to a string.\n */\nSourceMapGenerator.prototype.toString =\n  function SourceMapGenerator_toString() {\n    return JSON.stringify(this.toJSON());\n  };\n\nexports.SourceMapGenerator = SourceMapGenerator;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-map-generator.js\n// module id = 1\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n *\n * Based on the Base 64 VLQ implementation in Closure Compiler:\n * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java\n *\n * Copyright 2011 The Closure Compiler Authors. All rights reserved.\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are\n * met:\n *\n *  * Redistributions of source code must retain the above copyright\n *    notice, this list of conditions and the following disclaimer.\n *  * Redistributions in binary form must reproduce the above\n *    copyright notice, this list of conditions and the following\n *    disclaimer in the documentation and/or other materials provided\n *    with the distribution.\n *  * Neither the name of Google Inc. nor the names of its\n *    contributors may be used to endorse or promote products derived\n *    from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n * \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n\nvar base64 = require('./base64');\n\n// A single base 64 digit can contain 6 bits of data. For the base 64 variable\n// length quantities we use in the source map spec, the first bit is the sign,\n// the next four bits are the actual value, and the 6th bit is the\n// continuation bit. The continuation bit tells us whether there are more\n// digits in this value following this digit.\n//\n//   Continuation\n//   |    Sign\n//   |    |\n//   V    V\n//   101011\n\nvar VLQ_BASE_SHIFT = 5;\n\n// binary: 100000\nvar VLQ_BASE = 1 << VLQ_BASE_SHIFT;\n\n// binary: 011111\nvar VLQ_BASE_MASK = VLQ_BASE - 1;\n\n// binary: 100000\nvar VLQ_CONTINUATION_BIT = VLQ_BASE;\n\n/**\n * Converts from a two-complement value to a value where the sign bit is\n * placed in the least significant bit.  For example, as decimals:\n *   1 becomes 2 (10 binary), -1 becomes 3 (11 binary)\n *   2 becomes 4 (100 binary), -2 becomes 5 (101 binary)\n */\nfunction toVLQSigned(aValue) {\n  return aValue < 0\n    ? ((-aValue) << 1) + 1\n    : (aValue << 1) + 0;\n}\n\n/**\n * Converts to a two-complement value from a value where the sign bit is\n * placed in the least significant bit.  For example, as decimals:\n *   2 (10 binary) becomes 1, 3 (11 binary) becomes -1\n *   4 (100 binary) becomes 2, 5 (101 binary) becomes -2\n */\nfunction fromVLQSigned(aValue) {\n  var isNegative = (aValue & 1) === 1;\n  var shifted = aValue >> 1;\n  return isNegative\n    ? 
-shifted\n    : shifted;\n}\n\n/**\n * Returns the base 64 VLQ encoded value.\n */\nexports.encode = function base64VLQ_encode(aValue) {\n  var encoded = \"\";\n  var digit;\n\n  var vlq = toVLQSigned(aValue);\n\n  do {\n    digit = vlq & VLQ_BASE_MASK;\n    vlq >>>= VLQ_BASE_SHIFT;\n    if (vlq > 0) {\n      // There are still more digits in this value, so we must make sure the\n      // continuation bit is marked.\n      digit |= VLQ_CONTINUATION_BIT;\n    }\n    encoded += base64.encode(digit);\n  } while (vlq > 0);\n\n  return encoded;\n};\n\n/**\n * Decodes the next base 64 VLQ value from the given string and returns the\n * value and the rest of the string via the out parameter.\n */\nexports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {\n  var strLen = aStr.length;\n  var result = 0;\n  var shift = 0;\n  var continuation, digit;\n\n  do {\n    if (aIndex >= strLen) {\n      throw new Error(\"Expected more digits in base 64 VLQ value.\");\n    }\n\n    digit = base64.decode(aStr.charCodeAt(aIndex++));\n    if (digit === -1) {\n      throw new Error(\"Invalid base64 digit: \" + aStr.charAt(aIndex - 1));\n    }\n\n    continuation = !!(digit & VLQ_CONTINUATION_BIT);\n    digit &= VLQ_BASE_MASK;\n    result = result + (digit << shift);\n    shift += VLQ_BASE_SHIFT;\n  } while (continuation);\n\n  aOutParam.value = fromVLQSigned(result);\n  aOutParam.rest = aIndex;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/base64-vlq.js\n// module id = 2\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');\n\n/**\n * Encode an integer in the range of 0 to 63 to a single base 64 digit.\n */\nexports.encode = function (number) {\n  if (0 <= number && number < intToCharMap.length) {\n    return intToCharMap[number];\n  }\n  throw new TypeError(\"Must be between 0 and 63: \" + number);\n};\n\n/**\n * Decode a single base 64 character code digit to an integer. Returns -1 on\n * failure.\n */\nexports.decode = function (charCode) {\n  var bigA = 65;     // 'A'\n  var bigZ = 90;     // 'Z'\n\n  var littleA = 97;  // 'a'\n  var littleZ = 122; // 'z'\n\n  var zero = 48;     // '0'\n  var nine = 57;     // '9'\n\n  var plus = 43;     // '+'\n  var slash = 47;    // '/'\n\n  var littleOffset = 26;\n  var numberOffset = 52;\n\n  // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ\n  if (bigA <= charCode && charCode <= bigZ) {\n    return (charCode - bigA);\n  }\n\n  // 26 - 51: abcdefghijklmnopqrstuvwxyz\n  if (littleA <= charCode && charCode <= littleZ) {\n    return (charCode - littleA + littleOffset);\n  }\n\n  // 52 - 61: 0123456789\n  if (zero <= charCode && charCode <= nine) {\n    return (charCode - zero + numberOffset);\n  }\n\n  // 62: +\n  if (charCode == plus) {\n    return 62;\n  }\n\n  // 63: /\n  if (charCode == slash) {\n    return 63;\n  }\n\n  // Invalid base64 digit.\n  return -1;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/base64.js\n// module id = 3\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. 
See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n/**\n * This is a helper function for getting values from parameter/options\n * objects.\n *\n * @param args The object we are extracting values from\n * @param name The name of the property we are getting.\n * @param defaultValue An optional value to return if the property is missing\n * from the object. If this is not specified and the property is missing, an\n * error will be thrown.\n */\nfunction getArg(aArgs, aName, aDefaultValue) {\n  if (aName in aArgs) {\n    return aArgs[aName];\n  } else if (arguments.length === 3) {\n    return aDefaultValue;\n  } else {\n    throw new Error('\"' + aName + '\" is a required argument.');\n  }\n}\nexports.getArg = getArg;\n\nvar urlRegexp = /^(?:([\\w+\\-.]+):)?\\/\\/(?:(\\w+:\\w+)@)?([\\w.]*)(?::(\\d+))?(\\S*)$/;\nvar dataUrlRegexp = /^data:.+\\,.+$/;\n\nfunction urlParse(aUrl) {\n  var match = aUrl.match(urlRegexp);\n  if (!match) {\n    return null;\n  }\n  return {\n    scheme: match[1],\n    auth: match[2],\n    host: match[3],\n    port: match[4],\n    path: match[5]\n  };\n}\nexports.urlParse = urlParse;\n\nfunction urlGenerate(aParsedUrl) {\n  var url = '';\n  if (aParsedUrl.scheme) {\n    url += aParsedUrl.scheme + ':';\n  }\n  url += '//';\n  if (aParsedUrl.auth) {\n    url += aParsedUrl.auth + '@';\n  }\n  if (aParsedUrl.host) {\n    url += aParsedUrl.host;\n  }\n  if (aParsedUrl.port) {\n    url += \":\" + aParsedUrl.port\n  }\n  if (aParsedUrl.path) {\n    url += aParsedUrl.path;\n  }\n  return url;\n}\nexports.urlGenerate = urlGenerate;\n\n/**\n * Normalizes a path, or the path portion of a URL:\n *\n * - Replaces consecutive slashes with one slash.\n * - Removes unnecessary '.' parts.\n * - Removes unnecessary '<dir>/..' parts.\n *\n * Based on code in the Node.js 'path' core module.\n *\n * @param aPath The path or url to normalize.\n */\nfunction normalize(aPath) {\n  var path = aPath;\n  var url = urlParse(aPath);\n  if (url) {\n    if (!url.path) {\n      return aPath;\n    }\n    path = url.path;\n  }\n  var isAbsolute = exports.isAbsolute(path);\n\n  var parts = path.split(/\\/+/);\n  for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {\n    part = parts[i];\n    if (part === '.') {\n      parts.splice(i, 1);\n    } else if (part === '..') {\n      up++;\n    } else if (up > 0) {\n      if (part === '') {\n        // The first part is blank if the path is absolute. Trying to go\n        // above the root is a no-op. Therefore we can remove all '..' parts\n        // directly after the root.\n        parts.splice(i + 1, up);\n        up = 0;\n      } else {\n        parts.splice(i, 2);\n        up--;\n      }\n    }\n  }\n  path = parts.join('/');\n\n  if (path === '') {\n    path = isAbsolute ? '/' : '.';\n  }\n\n  if (url) {\n    url.path = path;\n    return urlGenerate(url);\n  }\n  return path;\n}\nexports.normalize = normalize;\n\n/**\n * Joins two paths/URLs.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be joined with the root.\n *\n * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a\n *   scheme-relative URL: Then the scheme of aRoot, if any, is prepended\n *   first.\n * - Otherwise aPath is a path. If aRoot is a URL, then its path portion\n *   is updated with the result and aRoot is returned. 
Otherwise the result\n *   is returned.\n *   - If aPath is absolute, the result is aPath.\n *   - Otherwise the two paths are joined with a slash.\n * - Joining for example 'http://' and 'www.example.com' is also supported.\n */\nfunction join(aRoot, aPath) {\n  if (aRoot === \"\") {\n    aRoot = \".\";\n  }\n  if (aPath === \"\") {\n    aPath = \".\";\n  }\n  var aPathUrl = urlParse(aPath);\n  var aRootUrl = urlParse(aRoot);\n  if (aRootUrl) {\n    aRoot = aRootUrl.path || '/';\n  }\n\n  // `join(foo, '//www.example.org')`\n  if (aPathUrl && !aPathUrl.scheme) {\n    if (aRootUrl) {\n      aPathUrl.scheme = aRootUrl.scheme;\n    }\n    return urlGenerate(aPathUrl);\n  }\n\n  if (aPathUrl || aPath.match(dataUrlRegexp)) {\n    return aPath;\n  }\n\n  // `join('http://', 'www.example.com')`\n  if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {\n    aRootUrl.host = aPath;\n    return urlGenerate(aRootUrl);\n  }\n\n  var joined = aPath.charAt(0) === '/'\n    ? aPath\n    : normalize(aRoot.replace(/\\/+$/, '') + '/' + aPath);\n\n  if (aRootUrl) {\n    aRootUrl.path = joined;\n    return urlGenerate(aRootUrl);\n  }\n  return joined;\n}\nexports.join = join;\n\nexports.isAbsolute = function (aPath) {\n  return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp);\n};\n\n/**\n * Make a path relative to a URL or another path.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be made relative to aRoot.\n */\nfunction relative(aRoot, aPath) {\n  if (aRoot === \"\") {\n    aRoot = \".\";\n  }\n\n  aRoot = aRoot.replace(/\\/$/, '');\n\n  // It is possible for the path to be above the root. In this case, simply\n  // checking whether the root is a prefix of the path won't work. Instead, we\n  // need to remove components from the root one by one, until either we find\n  // a prefix that fits, or we run out of components to remove.\n  var level = 0;\n  while (aPath.indexOf(aRoot + '/') !== 0) {\n    var index = aRoot.lastIndexOf(\"/\");\n    if (index < 0) {\n      return aPath;\n    }\n\n    // If the only part of the root that is left is the scheme (i.e. http://,\n    // file:///, etc.), one or more slashes (/), or simply nothing at all, we\n    // have exhausted all components, so the path is not relative to the root.\n    aRoot = aRoot.slice(0, index);\n    if (aRoot.match(/^([^\\/]+:\\/)?\\/*$/)) {\n      return aPath;\n    }\n\n    ++level;\n  }\n\n  // Make sure we add a \"../\" for each component we removed from the root.\n  return Array(level + 1).join(\"../\") + aPath.substr(aRoot.length + 1);\n}\nexports.relative = relative;\n\nvar supportsNullProto = (function () {\n  var obj = Object.create(null);\n  return !('__proto__' in obj);\n}());\n\nfunction identity (s) {\n  return s;\n}\n\n/**\n * Because behavior goes wacky when you set `__proto__` on objects, we\n * have to prefix all the strings in our set with an arbitrary character.\n *\n * See https://github.com/mozilla/source-map/pull/31 and\n * https://github.com/mozilla/source-map/issues/30\n *\n * @param String aStr\n */\nfunction toSetString(aStr) {\n  if (isProtoString(aStr)) {\n    return '$' + aStr;\n  }\n\n  return aStr;\n}\nexports.toSetString = supportsNullProto ? identity : toSetString;\n\nfunction fromSetString(aStr) {\n  if (isProtoString(aStr)) {\n    return aStr.slice(1);\n  }\n\n  return aStr;\n}\nexports.fromSetString = supportsNullProto ? 
identity : fromSetString;\n\nfunction isProtoString(s) {\n  if (!s) {\n    return false;\n  }\n\n  var length = s.length;\n\n  if (length < 9 /* \"__proto__\".length */) {\n    return false;\n  }\n\n  if (s.charCodeAt(length - 1) !== 95  /* '_' */ ||\n      s.charCodeAt(length - 2) !== 95  /* '_' */ ||\n      s.charCodeAt(length - 3) !== 111 /* 'o' */ ||\n      s.charCodeAt(length - 4) !== 116 /* 't' */ ||\n      s.charCodeAt(length - 5) !== 111 /* 'o' */ ||\n      s.charCodeAt(length - 6) !== 114 /* 'r' */ ||\n      s.charCodeAt(length - 7) !== 112 /* 'p' */ ||\n      s.charCodeAt(length - 8) !== 95  /* '_' */ ||\n      s.charCodeAt(length - 9) !== 95  /* '_' */) {\n    return false;\n  }\n\n  for (var i = length - 10; i >= 0; i--) {\n    if (s.charCodeAt(i) !== 36 /* '$' */) {\n      return false;\n    }\n  }\n\n  return true;\n}\n\n/**\n * Comparator between two mappings where the original positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same original source/line/column, but different generated\n * line and column the same. Useful when searching for a mapping with a\n * stubbed out mapping.\n */\nfunction compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {\n  var cmp = mappingA.source - mappingB.source;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalLine - mappingB.originalLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalColumn - mappingB.originalColumn;\n  if (cmp !== 0 || onlyCompareOriginal) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedLine - mappingB.generatedLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  return mappingA.name - mappingB.name;\n}\nexports.compareByOriginalPositions = compareByOriginalPositions;\n\n/**\n * Comparator between two mappings with deflated source and name indices where\n * the generated positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same generated line and column, but different\n * source/name/original line and column the same. 
Useful when searching for a\n * mapping with a stubbed out mapping.\n */\nfunction compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {\n  var cmp = mappingA.generatedLine - mappingB.generatedLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n  if (cmp !== 0 || onlyCompareGenerated) {\n    return cmp;\n  }\n\n  cmp = mappingA.source - mappingB.source;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalLine - mappingB.originalLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalColumn - mappingB.originalColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  return mappingA.name - mappingB.name;\n}\nexports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;\n\nfunction strcmp(aStr1, aStr2) {\n  if (aStr1 === aStr2) {\n    return 0;\n  }\n\n  if (aStr1 > aStr2) {\n    return 1;\n  }\n\n  return -1;\n}\n\n/**\n * Comparator between two mappings with inflated source and name strings where\n * the generated positions are compared.\n */\nfunction compareByGeneratedPositionsInflated(mappingA, mappingB) {\n  var cmp = mappingA.generatedLine - mappingB.generatedLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = strcmp(mappingA.source, mappingB.source);\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalLine - mappingB.originalLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalColumn - mappingB.originalColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  return strcmp(mappingA.name, mappingB.name);\n}\nexports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/util.js\n// module id = 4\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar has = Object.prototype.hasOwnProperty;\nvar hasNativeMap = typeof Map !== \"undefined\";\n\n/**\n * A data structure which is a combination of an array and a set. Adding a new\n * member is O(1), testing for membership is O(1), and finding the index of an\n * element is O(1). Removing elements from the set is not supported. Only\n * strings are supported for membership.\n */\nfunction ArraySet() {\n  this._array = [];\n  this._set = hasNativeMap ? new Map() : Object.create(null);\n}\n\n/**\n * Static method for creating ArraySet instances from an existing array.\n */\nArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {\n  var set = new ArraySet();\n  for (var i = 0, len = aArray.length; i < len; i++) {\n    set.add(aArray[i], aAllowDuplicates);\n  }\n  return set;\n};\n\n/**\n * Return how many unique items are in this ArraySet. If duplicates have been\n * added, than those do not count towards the size.\n *\n * @returns Number\n */\nArraySet.prototype.size = function ArraySet_size() {\n  return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length;\n};\n\n/**\n * Add the given string to this set.\n *\n * @param String aStr\n */\nArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {\n  var sStr = hasNativeMap ? aStr : util.toSetString(aStr);\n  var isDuplicate = hasNativeMap ? 
this.has(aStr) : has.call(this._set, sStr);\n  var idx = this._array.length;\n  if (!isDuplicate || aAllowDuplicates) {\n    this._array.push(aStr);\n  }\n  if (!isDuplicate) {\n    if (hasNativeMap) {\n      this._set.set(aStr, idx);\n    } else {\n      this._set[sStr] = idx;\n    }\n  }\n};\n\n/**\n * Is the given string a member of this set?\n *\n * @param String aStr\n */\nArraySet.prototype.has = function ArraySet_has(aStr) {\n  if (hasNativeMap) {\n    return this._set.has(aStr);\n  } else {\n    var sStr = util.toSetString(aStr);\n    return has.call(this._set, sStr);\n  }\n};\n\n/**\n * What is the index of the given string in the array?\n *\n * @param String aStr\n */\nArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {\n  if (hasNativeMap) {\n    var idx = this._set.get(aStr);\n    if (idx >= 0) {\n        return idx;\n    }\n  } else {\n    var sStr = util.toSetString(aStr);\n    if (has.call(this._set, sStr)) {\n      return this._set[sStr];\n    }\n  }\n\n  throw new Error('\"' + aStr + '\" is not in the set.');\n};\n\n/**\n * What is the element at the given index?\n *\n * @param Number aIdx\n */\nArraySet.prototype.at = function ArraySet_at(aIdx) {\n  if (aIdx >= 0 && aIdx < this._array.length) {\n    return this._array[aIdx];\n  }\n  throw new Error('No element indexed by ' + aIdx);\n};\n\n/**\n * Returns the array representation of this set (which has the proper indices\n * indicated by indexOf). Note that this is a copy of the internal array used\n * for storing the members so that no one can mess with internal state.\n */\nArraySet.prototype.toArray = function ArraySet_toArray() {\n  return this._array.slice();\n};\n\nexports.ArraySet = ArraySet;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/array-set.js\n// module id = 5\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2014 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\n\n/**\n * Determine whether mappingB is after mappingA with respect to generated\n * position.\n */\nfunction generatedPositionAfter(mappingA, mappingB) {\n  // Optimized for most common case\n  var lineA = mappingA.generatedLine;\n  var lineB = mappingB.generatedLine;\n  var columnA = mappingA.generatedColumn;\n  var columnB = mappingB.generatedColumn;\n  return lineB > lineA || lineB == lineA && columnB >= columnA ||\n         util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;\n}\n\n/**\n * A data structure to provide a sorted view of accumulated mappings in a\n * performance conscious manner. It trades a neglibable overhead in general\n * case for a large speedup in case of mappings being added in order.\n */\nfunction MappingList() {\n  this._array = [];\n  this._sorted = true;\n  // Serves as infimum\n  this._last = {generatedLine: -1, generatedColumn: 0};\n}\n\n/**\n * Iterate through internal items. 
This method takes the same arguments that\n * `Array.prototype.forEach` takes.\n *\n * NOTE: The order of the mappings is NOT guaranteed.\n */\nMappingList.prototype.unsortedForEach =\n  function MappingList_forEach(aCallback, aThisArg) {\n    this._array.forEach(aCallback, aThisArg);\n  };\n\n/**\n * Add the given source mapping.\n *\n * @param Object aMapping\n */\nMappingList.prototype.add = function MappingList_add(aMapping) {\n  if (generatedPositionAfter(this._last, aMapping)) {\n    this._last = aMapping;\n    this._array.push(aMapping);\n  } else {\n    this._sorted = false;\n    this._array.push(aMapping);\n  }\n};\n\n/**\n * Returns the flat, sorted array of mappings. The mappings are sorted by\n * generated position.\n *\n * WARNING: This method returns internal data without copying, for\n * performance. The return value must NOT be mutated, and should be treated as\n * an immutable borrow. If you want to take ownership, you must make your own\n * copy.\n */\nMappingList.prototype.toArray = function MappingList_toArray() {\n  if (!this._sorted) {\n    this._array.sort(util.compareByGeneratedPositionsInflated);\n    this._sorted = true;\n  }\n  return this._array;\n};\n\nexports.MappingList = MappingList;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/mapping-list.js\n// module id = 6\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar binarySearch = require('./binary-search');\nvar ArraySet = require('./array-set').ArraySet;\nvar base64VLQ = require('./base64-vlq');\nvar quickSort = require('./quick-sort').quickSort;\n\nfunction SourceMapConsumer(aSourceMap) {\n  var sourceMap = aSourceMap;\n  if (typeof aSourceMap === 'string') {\n    sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n  }\n\n  return sourceMap.sections != null\n    ? new IndexedSourceMapConsumer(sourceMap)\n    : new BasicSourceMapConsumer(sourceMap);\n}\n\nSourceMapConsumer.fromSourceMap = function(aSourceMap) {\n  return BasicSourceMapConsumer.fromSourceMap(aSourceMap);\n}\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nSourceMapConsumer.prototype._version = 3;\n\n// `__generatedMappings` and `__originalMappings` are arrays that hold the\n// parsed mapping coordinates from the source map's \"mappings\" attribute. They\n// are lazily instantiated, accessed via the `_generatedMappings` and\n// `_originalMappings` getters respectively, and we only parse the mappings\n// and create these arrays once queried for a source location. 
We jump through\n// these hoops because there can be many thousands of mappings, and parsing\n// them is expensive, so we only want to do it if we must.\n//\n// Each object in the arrays is of the form:\n//\n//     {\n//       generatedLine: The line number in the generated code,\n//       generatedColumn: The column number in the generated code,\n//       source: The path to the original source file that generated this\n//               chunk of code,\n//       originalLine: The line number in the original source that\n//                     corresponds to this chunk of generated code,\n//       originalColumn: The column number in the original source that\n//                       corresponds to this chunk of generated code,\n//       name: The name of the original symbol which generated this chunk of\n//             code.\n//     }\n//\n// All properties except for `generatedLine` and `generatedColumn` can be\n// `null`.\n//\n// `_generatedMappings` is ordered by the generated positions.\n//\n// `_originalMappings` is ordered by the original positions.\n\nSourceMapConsumer.prototype.__generatedMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {\n  get: function () {\n    if (!this.__generatedMappings) {\n      this._parseMappings(this._mappings, this.sourceRoot);\n    }\n\n    return this.__generatedMappings;\n  }\n});\n\nSourceMapConsumer.prototype.__originalMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {\n  get: function () {\n    if (!this.__originalMappings) {\n      this._parseMappings(this._mappings, this.sourceRoot);\n    }\n\n    return this.__originalMappings;\n  }\n});\n\nSourceMapConsumer.prototype._charIsMappingSeparator =\n  function SourceMapConsumer_charIsMappingSeparator(aStr, index) {\n    var c = aStr.charAt(index);\n    return c === \";\" || c === \",\";\n  };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nSourceMapConsumer.prototype._parseMappings =\n  function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n    throw new Error(\"Subclasses must implement _parseMappings\");\n  };\n\nSourceMapConsumer.GENERATED_ORDER = 1;\nSourceMapConsumer.ORIGINAL_ORDER = 2;\n\nSourceMapConsumer.GREATEST_LOWER_BOUND = 1;\nSourceMapConsumer.LEAST_UPPER_BOUND = 2;\n\n/**\n * Iterate over each mapping between an original source/line/column and a\n * generated line/column in this source map.\n *\n * @param Function aCallback\n *        The function that is called with each mapping.\n * @param Object aContext\n *        Optional. If specified, this object will be the value of `this` every\n *        time that `aCallback` is called.\n * @param aOrder\n *        Either `SourceMapConsumer.GENERATED_ORDER` or\n *        `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to\n *        iterate over the mappings sorted by the generated file's line/column\n *        order or the original's source/line/column order, respectively. 
Defaults to\n *        `SourceMapConsumer.GENERATED_ORDER`.\n */\nSourceMapConsumer.prototype.eachMapping =\n  function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {\n    var context = aContext || null;\n    var order = aOrder || SourceMapConsumer.GENERATED_ORDER;\n\n    var mappings;\n    switch (order) {\n    case SourceMapConsumer.GENERATED_ORDER:\n      mappings = this._generatedMappings;\n      break;\n    case SourceMapConsumer.ORIGINAL_ORDER:\n      mappings = this._originalMappings;\n      break;\n    default:\n      throw new Error(\"Unknown order of iteration.\");\n    }\n\n    var sourceRoot = this.sourceRoot;\n    mappings.map(function (mapping) {\n      var source = mapping.source === null ? null : this._sources.at(mapping.source);\n      if (source != null && sourceRoot != null) {\n        source = util.join(sourceRoot, source);\n      }\n      return {\n        source: source,\n        generatedLine: mapping.generatedLine,\n        generatedColumn: mapping.generatedColumn,\n        originalLine: mapping.originalLine,\n        originalColumn: mapping.originalColumn,\n        name: mapping.name === null ? null : this._names.at(mapping.name)\n      };\n    }, this).forEach(aCallback, context);\n  };\n\n/**\n * Returns all generated line and column information for the original source,\n * line, and column provided. If no column is provided, returns all mappings\n * corresponding to a either the line we are searching for or the next\n * closest line that has any mappings. Otherwise, returns all mappings\n * corresponding to the given line and either the column we are searching for\n * or the next closest column that has any offsets.\n *\n * The only argument is an object with the following properties:\n *\n *   - source: The filename of the original source.\n *   - line: The line number in the original source.\n *   - column: Optional. the column number in the original source.\n *\n * and an array of objects is returned, each with the following properties:\n *\n *   - line: The line number in the generated source, or null.\n *   - column: The column number in the generated source, or null.\n */\nSourceMapConsumer.prototype.allGeneratedPositionsFor =\n  function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {\n    var line = util.getArg(aArgs, 'line');\n\n    // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping\n    // returns the index of the closest mapping less than the needle. 
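A short sketch of eachMapping. The map is built with the library's own generator so the snippet is self-contained; the file names and positions are illustrative.

var sourceMap = require('source-map');

var gen = new sourceMap.SourceMapGenerator({ file: 'out.js' });
gen.addMapping({ generated: { line: 1, column: 0 }, original: { line: 3, column: 0 }, source: 'foo.js' });
gen.addMapping({ generated: { line: 2, column: 0 }, original: { line: 1, column: 0 }, source: 'bar.js' });

var consumer = new sourceMap.SourceMapConsumer(gen.toString());
consumer.eachMapping(function (m) {
  // m carries source, generatedLine/Column, originalLine/Column and name (possibly null)
  console.log(m.source + ':' + m.originalLine + ' -> ' + m.generatedLine + ':' + m.generatedColumn);
}, null, sourceMap.SourceMapConsumer.ORIGINAL_ORDER); // omit the order argument for GENERATED_ORDER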
By\n    // setting needle.originalColumn to 0, we thus find the last mapping for\n    // the given line, provided such a mapping exists.\n    var needle = {\n      source: util.getArg(aArgs, 'source'),\n      originalLine: line,\n      originalColumn: util.getArg(aArgs, 'column', 0)\n    };\n\n    if (this.sourceRoot != null) {\n      needle.source = util.relative(this.sourceRoot, needle.source);\n    }\n    if (!this._sources.has(needle.source)) {\n      return [];\n    }\n    needle.source = this._sources.indexOf(needle.source);\n\n    var mappings = [];\n\n    var index = this._findMapping(needle,\n                                  this._originalMappings,\n                                  \"originalLine\",\n                                  \"originalColumn\",\n                                  util.compareByOriginalPositions,\n                                  binarySearch.LEAST_UPPER_BOUND);\n    if (index >= 0) {\n      var mapping = this._originalMappings[index];\n\n      if (aArgs.column === undefined) {\n        var originalLine = mapping.originalLine;\n\n        // Iterate until either we run out of mappings, or we run into\n        // a mapping for a different line than the one we found. Since\n        // mappings are sorted, this is guaranteed to find all mappings for\n        // the line we found.\n        while (mapping && mapping.originalLine === originalLine) {\n          mappings.push({\n            line: util.getArg(mapping, 'generatedLine', null),\n            column: util.getArg(mapping, 'generatedColumn', null),\n            lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n          });\n\n          mapping = this._originalMappings[++index];\n        }\n      } else {\n        var originalColumn = mapping.originalColumn;\n\n        // Iterate until either we run out of mappings, or we run into\n        // a mapping for a different line than the one we were searching for.\n        // Since mappings are sorted, this is guaranteed to find all mappings for\n        // the line we are searching for.\n        while (mapping &&\n               mapping.originalLine === line &&\n               mapping.originalColumn == originalColumn) {\n          mappings.push({\n            line: util.getArg(mapping, 'generatedLine', null),\n            column: util.getArg(mapping, 'generatedColumn', null),\n            lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n          });\n\n          mapping = this._originalMappings[++index];\n        }\n      }\n    }\n\n    return mappings;\n  };\n\nexports.SourceMapConsumer = SourceMapConsumer;\n\n/**\n * A BasicSourceMapConsumer instance represents a parsed source map which we can\n * query for information about the original file positions by giving it a file\n * position in the generated source.\n *\n * The only parameter is the raw source map (either as a JSON string, or\n * already parsed to an object). According to the spec, source maps have the\n * following attributes:\n *\n *   - version: Which version of the source map spec this map is following.\n *   - sources: An array of URLs to the original source files.\n *   - names: An array of identifiers which can be referrenced by individual mappings.\n *   - sourceRoot: Optional. The URL root from which all sources are relative.\n *   - sourcesContent: Optional. An array of contents of the original source files.\n *   - mappings: A string of base64 VLQs which contain the actual mappings.\n *   - file: Optional. 
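A sketch of allGeneratedPositionsFor on another small self-built map (file names are illustrative). computeColumnSpans is optional, but calling it first fills in the lastColumn fields of the results.

var sourceMap = require('source-map');

// Two generated chunks on line 1 map back to the same original line of lib.js.
var gen = new sourceMap.SourceMapGenerator({ file: 'out.js' });
gen.addMapping({ generated: { line: 1, column: 0 },  original: { line: 5, column: 0 }, source: 'lib.js' });
gen.addMapping({ generated: { line: 1, column: 12 }, original: { line: 5, column: 8 }, source: 'lib.js' });

var consumer = new sourceMap.SourceMapConsumer(gen.toString());
consumer.computeColumnSpans();
consumer.allGeneratedPositionsFor({ source: 'lib.js', line: 5 });
// -> [ { line: 1, column: 0, lastColumn: 11 }, { line: 1, column: 12, lastColumn: Infinity } ]
consumer.allGeneratedPositionsFor({ source: 'lib.js', line: 5, column: 8 });
// -> only the mapping whose original column is 8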
The generated file this source map is associated with.\n *\n * Here is an example source map, taken from the source map spec[0]:\n *\n *     {\n *       version : 3,\n *       file: \"out.js\",\n *       sourceRoot : \"\",\n *       sources: [\"foo.js\", \"bar.js\"],\n *       names: [\"src\", \"maps\", \"are\", \"fun\"],\n *       mappings: \"AA,AB;;ABCDE;\"\n *     }\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#\n */\nfunction BasicSourceMapConsumer(aSourceMap) {\n  var sourceMap = aSourceMap;\n  if (typeof aSourceMap === 'string') {\n    sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n  }\n\n  var version = util.getArg(sourceMap, 'version');\n  var sources = util.getArg(sourceMap, 'sources');\n  // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which\n  // requires the array) to play nice here.\n  var names = util.getArg(sourceMap, 'names', []);\n  var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);\n  var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);\n  var mappings = util.getArg(sourceMap, 'mappings');\n  var file = util.getArg(sourceMap, 'file', null);\n\n  // Once again, Sass deviates from the spec and supplies the version as a\n  // string rather than a number, so we use loose equality checking here.\n  if (version != this._version) {\n    throw new Error('Unsupported version: ' + version);\n  }\n\n  sources = sources\n    .map(String)\n    // Some source maps produce relative source paths like \"./foo.js\" instead of\n    // \"foo.js\".  Normalize these first so that future comparisons will succeed.\n    // See bugzil.la/1090768.\n    .map(util.normalize)\n    // Always ensure that absolute sources are internally stored relative to\n    // the source root, if the source root is absolute. Not doing this would\n    // be particularly problematic when the source root is a prefix of the\n    // source (valid, but why??). See github issue #199 and bugzil.la/1188982.\n    .map(function (source) {\n      return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source)\n        ? util.relative(sourceRoot, source)\n        : source;\n    });\n\n  // Pass `true` below to allow duplicate names and sources. While source maps\n  // are intended to be compressed and deduplicated, the TypeScript compiler\n  // sometimes generates source maps with duplicates in them. 
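As a construction sketch: the consumer accepts either a parsed object or a JSON string, and (as the replace call above shows) strips the ")]}'"-style XSSI guard that some servers prepend. The map below is a tiny hand-written but valid example.

var SourceMapConsumer = require('source-map').SourceMapConsumer;

var json = ")]}'" + JSON.stringify({
  version: 3,
  file: 'out.js',
  sources: ['foo.js'],
  names: [],
  mappings: 'AAAA'           // one segment: generated 1:0 -> foo.js 1:0
});

var consumer = new SourceMapConsumer(json); // a plain object is accepted as well
consumer.sources;    // [ 'foo.js' ] (joined with sourceRoot when one is set)
consumer.file;       // 'out.js'
consumer.sourceRoot; // null, since this map has no sourceRoot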
See Github issue\n  // #72 and bugzil.la/889492.\n  this._names = ArraySet.fromArray(names.map(String), true);\n  this._sources = ArraySet.fromArray(sources, true);\n\n  this.sourceRoot = sourceRoot;\n  this.sourcesContent = sourcesContent;\n  this._mappings = mappings;\n  this.file = file;\n}\n\nBasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nBasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;\n\n/**\n * Create a BasicSourceMapConsumer from a SourceMapGenerator.\n *\n * @param SourceMapGenerator aSourceMap\n *        The source map that will be consumed.\n * @returns BasicSourceMapConsumer\n */\nBasicSourceMapConsumer.fromSourceMap =\n  function SourceMapConsumer_fromSourceMap(aSourceMap) {\n    var smc = Object.create(BasicSourceMapConsumer.prototype);\n\n    var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);\n    var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);\n    smc.sourceRoot = aSourceMap._sourceRoot;\n    smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),\n                                                            smc.sourceRoot);\n    smc.file = aSourceMap._file;\n\n    // Because we are modifying the entries (by converting string sources and\n    // names to indices into the sources and names ArraySets), we have to make\n    // a copy of the entry or else bad things happen. Shared mutable state\n    // strikes again! See github issue #191.\n\n    var generatedMappings = aSourceMap._mappings.toArray().slice();\n    var destGeneratedMappings = smc.__generatedMappings = [];\n    var destOriginalMappings = smc.__originalMappings = [];\n\n    for (var i = 0, length = generatedMappings.length; i < length; i++) {\n      var srcMapping = generatedMappings[i];\n      var destMapping = new Mapping;\n      destMapping.generatedLine = srcMapping.generatedLine;\n      destMapping.generatedColumn = srcMapping.generatedColumn;\n\n      if (srcMapping.source) {\n        destMapping.source = sources.indexOf(srcMapping.source);\n        destMapping.originalLine = srcMapping.originalLine;\n        destMapping.originalColumn = srcMapping.originalColumn;\n\n        if (srcMapping.name) {\n          destMapping.name = names.indexOf(srcMapping.name);\n        }\n\n        destOriginalMappings.push(destMapping);\n      }\n\n      destGeneratedMappings.push(destMapping);\n    }\n\n    quickSort(smc.__originalMappings, util.compareByOriginalPositions);\n\n    return smc;\n  };\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nBasicSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {\n  get: function () {\n    return this._sources.toArray().map(function (s) {\n      return this.sourceRoot != null ? 
util.join(this.sourceRoot, s) : s;\n    }, this);\n  }\n});\n\n/**\n * Provide the JIT with a nice shape / hidden class.\n */\nfunction Mapping() {\n  this.generatedLine = 0;\n  this.generatedColumn = 0;\n  this.source = null;\n  this.originalLine = null;\n  this.originalColumn = null;\n  this.name = null;\n}\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nBasicSourceMapConsumer.prototype._parseMappings =\n  function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n    var generatedLine = 1;\n    var previousGeneratedColumn = 0;\n    var previousOriginalLine = 0;\n    var previousOriginalColumn = 0;\n    var previousSource = 0;\n    var previousName = 0;\n    var length = aStr.length;\n    var index = 0;\n    var cachedSegments = {};\n    var temp = {};\n    var originalMappings = [];\n    var generatedMappings = [];\n    var mapping, str, segment, end, value;\n\n    while (index < length) {\n      if (aStr.charAt(index) === ';') {\n        generatedLine++;\n        index++;\n        previousGeneratedColumn = 0;\n      }\n      else if (aStr.charAt(index) === ',') {\n        index++;\n      }\n      else {\n        mapping = new Mapping();\n        mapping.generatedLine = generatedLine;\n\n        // Because each offset is encoded relative to the previous one,\n        // many segments often have the same encoding. We can exploit this\n        // fact by caching the parsed variable length fields of each segment,\n        // allowing us to avoid a second parse if we encounter the same\n        // segment again.\n        for (end = index; end < length; end++) {\n          if (this._charIsMappingSeparator(aStr, end)) {\n            break;\n          }\n        }\n        str = aStr.slice(index, end);\n\n        segment = cachedSegments[str];\n        if (segment) {\n          index += str.length;\n        } else {\n          segment = [];\n          while (index < end) {\n            base64VLQ.decode(aStr, index, temp);\n            value = temp.value;\n            index = temp.rest;\n            segment.push(value);\n          }\n\n          if (segment.length === 2) {\n            throw new Error('Found a source, but no line and column');\n          }\n\n          if (segment.length === 3) {\n            throw new Error('Found a source and line, but no column');\n          }\n\n          cachedSegments[str] = segment;\n        }\n\n        // Generated column.\n        mapping.generatedColumn = previousGeneratedColumn + segment[0];\n        previousGeneratedColumn = mapping.generatedColumn;\n\n        if (segment.length > 1) {\n          // Original source.\n          mapping.source = previousSource + segment[1];\n          previousSource += segment[1];\n\n          // Original line.\n          mapping.originalLine = previousOriginalLine + segment[2];\n          previousOriginalLine = mapping.originalLine;\n          // Lines are stored 0-based\n          mapping.originalLine += 1;\n\n          // Original column.\n          mapping.originalColumn = previousOriginalColumn + segment[3];\n          previousOriginalColumn = mapping.originalColumn;\n\n          if (segment.length > 4) {\n            // Original name.\n            mapping.name = previousName + segment[4];\n            previousName += segment[4];\n          }\n        }\n\n        generatedMappings.push(mapping);\n        if (typeof mapping.originalLine === 'number') {\n    
      originalMappings.push(mapping);\n        }\n      }\n    }\n\n    quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated);\n    this.__generatedMappings = generatedMappings;\n\n    quickSort(originalMappings, util.compareByOriginalPositions);\n    this.__originalMappings = originalMappings;\n  };\n\n/**\n * Find the mapping that best matches the hypothetical \"needle\" mapping that\n * we are searching for in the given \"haystack\" of mappings.\n */\nBasicSourceMapConsumer.prototype._findMapping =\n  function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,\n                                         aColumnName, aComparator, aBias) {\n    // To return the position we are searching for, we must first find the\n    // mapping for the given position and then return the opposite position it\n    // points to. Because the mappings are sorted, we can use binary search to\n    // find the best mapping.\n\n    if (aNeedle[aLineName] <= 0) {\n      throw new TypeError('Line must be greater than or equal to 1, got '\n                          + aNeedle[aLineName]);\n    }\n    if (aNeedle[aColumnName] < 0) {\n      throw new TypeError('Column must be greater than or equal to 0, got '\n                          + aNeedle[aColumnName]);\n    }\n\n    return binarySearch.search(aNeedle, aMappings, aComparator, aBias);\n  };\n\n/**\n * Compute the last column for each generated mapping. The last column is\n * inclusive.\n */\nBasicSourceMapConsumer.prototype.computeColumnSpans =\n  function SourceMapConsumer_computeColumnSpans() {\n    for (var index = 0; index < this._generatedMappings.length; ++index) {\n      var mapping = this._generatedMappings[index];\n\n      // Mappings do not contain a field for the last generated columnt. We\n      // can come up with an optimistic estimate, however, by assuming that\n      // mappings are contiguous (i.e. given two consecutive mappings, the\n      // first mapping ends where the second one starts).\n      if (index + 1 < this._generatedMappings.length) {\n        var nextMapping = this._generatedMappings[index + 1];\n\n        if (mapping.generatedLine === nextMapping.generatedLine) {\n          mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;\n          continue;\n        }\n      }\n\n      // The last mapping for each line spans the entire line.\n      mapping.lastGeneratedColumn = Infinity;\n    }\n  };\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. The only argument is an object\n * with the following properties:\n *\n *   - line: The line number in the generated source.\n *   - column: The column number in the generated source.\n *   - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n *     'SourceMapConsumer.LEAST_UPPER_BOUND'. 
Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n *     Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n *   - source: The original source file, or null.\n *   - line: The line number in the original source, or null.\n *   - column: The column number in the original source, or null.\n *   - name: The original identifier, or null.\n */\nBasicSourceMapConsumer.prototype.originalPositionFor =\n  function SourceMapConsumer_originalPositionFor(aArgs) {\n    var needle = {\n      generatedLine: util.getArg(aArgs, 'line'),\n      generatedColumn: util.getArg(aArgs, 'column')\n    };\n\n    var index = this._findMapping(\n      needle,\n      this._generatedMappings,\n      \"generatedLine\",\n      \"generatedColumn\",\n      util.compareByGeneratedPositionsDeflated,\n      util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n    );\n\n    if (index >= 0) {\n      var mapping = this._generatedMappings[index];\n\n      if (mapping.generatedLine === needle.generatedLine) {\n        var source = util.getArg(mapping, 'source', null);\n        if (source !== null) {\n          source = this._sources.at(source);\n          if (this.sourceRoot != null) {\n            source = util.join(this.sourceRoot, source);\n          }\n        }\n        var name = util.getArg(mapping, 'name', null);\n        if (name !== null) {\n          name = this._names.at(name);\n        }\n        return {\n          source: source,\n          line: util.getArg(mapping, 'originalLine', null),\n          column: util.getArg(mapping, 'originalColumn', null),\n          name: name\n        };\n      }\n    }\n\n    return {\n      source: null,\n      line: null,\n      column: null,\n      name: null\n    };\n  };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nBasicSourceMapConsumer.prototype.hasContentsOfAllSources =\n  function BasicSourceMapConsumer_hasContentsOfAllSources() {\n    if (!this.sourcesContent) {\n      return false;\n    }\n    return this.sourcesContent.length >= this._sources.size() &&\n      !this.sourcesContent.some(function (sc) { return sc == null; });\n  };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nBasicSourceMapConsumer.prototype.sourceContentFor =\n  function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n    if (!this.sourcesContent) {\n      return null;\n    }\n\n    if (this.sourceRoot != null) {\n      aSource = util.relative(this.sourceRoot, aSource);\n    }\n\n    if (this._sources.has(aSource)) {\n      return this.sourcesContent[this._sources.indexOf(aSource)];\n    }\n\n    var url;\n    if (this.sourceRoot != null\n        && (url = util.urlParse(this.sourceRoot))) {\n      // XXX: file:// URIs and absolute paths lead to unexpected behavior for\n      // many users. We can help them out when they expect file:// URIs to\n      // behave like it would if they were running a local HTTP server. 
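Putting originalPositionFor, hasContentsOfAllSources and sourceContentFor together. This is a self-contained sketch; the single mapping, file names and content are made up.

var sourceMap = require('source-map');

var gen = new sourceMap.SourceMapGenerator({ file: 'min.js' });
gen.addMapping({ generated: { line: 1, column: 5 }, original: { line: 7, column: 2 },
                 source: 'app.js', name: 'answer' });
gen.setSourceContent('app.js', 'var answer = 42;');

var consumer = new sourceMap.SourceMapConsumer(gen.toString());
consumer.originalPositionFor({ line: 1, column: 5 });
// -> { source: 'app.js', line: 7, column: 2, name: 'answer' }
consumer.originalPositionFor({ line: 1, column: 99 });
// -> same mapping: the default GREATEST_LOWER_BOUND bias falls back to the closest earlier column
consumer.hasContentsOfAllSources();            // true (content was embedded above)
consumer.sourceContentFor('app.js');           // 'var answer = 42;'
consumer.sourceContentFor('missing.js', true); // null instead of throwing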
See\n      // https://bugzilla.mozilla.org/show_bug.cgi?id=885597.\n      var fileUriAbsPath = aSource.replace(/^file:\\/\\//, \"\");\n      if (url.scheme == \"file\"\n          && this._sources.has(fileUriAbsPath)) {\n        return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]\n      }\n\n      if ((!url.path || url.path == \"/\")\n          && this._sources.has(\"/\" + aSource)) {\n        return this.sourcesContent[this._sources.indexOf(\"/\" + aSource)];\n      }\n    }\n\n    // This function is used recursively from\n    // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we\n    // don't want to throw if we can't find the source - we just want to\n    // return null, so we provide a flag to exit gracefully.\n    if (nullOnMissing) {\n      return null;\n    }\n    else {\n      throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n    }\n  };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. The only argument is an object with\n * the following properties:\n *\n *   - source: The filename of the original source.\n *   - line: The line number in the original source.\n *   - column: The column number in the original source.\n *   - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n *     'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n *     Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n *   - line: The line number in the generated source, or null.\n *   - column: The column number in the generated source, or null.\n */\nBasicSourceMapConsumer.prototype.generatedPositionFor =\n  function SourceMapConsumer_generatedPositionFor(aArgs) {\n    var source = util.getArg(aArgs, 'source');\n    if (this.sourceRoot != null) {\n      source = util.relative(this.sourceRoot, source);\n    }\n    if (!this._sources.has(source)) {\n      return {\n        line: null,\n        column: null,\n        lastColumn: null\n      };\n    }\n    source = this._sources.indexOf(source);\n\n    var needle = {\n      source: source,\n      originalLine: util.getArg(aArgs, 'line'),\n      originalColumn: util.getArg(aArgs, 'column')\n    };\n\n    var index = this._findMapping(\n      needle,\n      this._originalMappings,\n      \"originalLine\",\n      \"originalColumn\",\n      util.compareByOriginalPositions,\n      util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n    );\n\n    if (index >= 0) {\n      var mapping = this._originalMappings[index];\n\n      if (mapping.source === needle.source) {\n        return {\n          line: util.getArg(mapping, 'generatedLine', null),\n          column: util.getArg(mapping, 'generatedColumn', null),\n          lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n        };\n      }\n    }\n\n    return {\n      line: null,\n      column: null,\n      lastColumn: null\n    };\n  };\n\nexports.BasicSourceMapConsumer = BasicSourceMapConsumer;\n\n/**\n * An IndexedSourceMapConsumer instance represents a parsed source map which\n * we can query for information. It differs from BasicSourceMapConsumer in\n * that it takes \"indexed\" source maps (i.e. 
ones with a \"sections\" field) as\n * input.\n *\n * The only parameter is a raw source map (either as a JSON string, or already\n * parsed to an object). According to the spec for indexed source maps, they\n * have the following attributes:\n *\n *   - version: Which version of the source map spec this map is following.\n *   - file: Optional. The generated file this source map is associated with.\n *   - sections: A list of section definitions.\n *\n * Each value under the \"sections\" field has two fields:\n *   - offset: The offset into the original specified at which this section\n *       begins to apply, defined as an object with a \"line\" and \"column\"\n *       field.\n *   - map: A source map definition. This source map could also be indexed,\n *       but doesn't have to be.\n *\n * Instead of the \"map\" field, it's also possible to have a \"url\" field\n * specifying a URL to retrieve a source map from, but that's currently\n * unsupported.\n *\n * Here's an example source map, taken from the source map spec[0], but\n * modified to omit a section which uses the \"url\" field.\n *\n *  {\n *    version : 3,\n *    file: \"app.js\",\n *    sections: [{\n *      offset: {line:100, column:10},\n *      map: {\n *        version : 3,\n *        file: \"section.js\",\n *        sources: [\"foo.js\", \"bar.js\"],\n *        names: [\"src\", \"maps\", \"are\", \"fun\"],\n *        mappings: \"AAAA,E;;ABCDE;\"\n *      }\n *    }],\n *  }\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt\n */\nfunction IndexedSourceMapConsumer(aSourceMap) {\n  var sourceMap = aSourceMap;\n  if (typeof aSourceMap === 'string') {\n    sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n  }\n\n  var version = util.getArg(sourceMap, 'version');\n  var sections = util.getArg(sourceMap, 'sections');\n\n  if (version != this._version) {\n    throw new Error('Unsupported version: ' + version);\n  }\n\n  this._sources = new ArraySet();\n  this._names = new ArraySet();\n\n  var lastOffset = {\n    line: -1,\n    column: 0\n  };\n  this._sections = sections.map(function (s) {\n    if (s.url) {\n      // The url field will require support for asynchronicity.\n      // See https://github.com/mozilla/source-map/issues/16\n      throw new Error('Support for url field in sections not implemented.');\n    }\n    var offset = util.getArg(s, 'offset');\n    var offsetLine = util.getArg(offset, 'line');\n    var offsetColumn = util.getArg(offset, 'column');\n\n    if (offsetLine < lastOffset.line ||\n        (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {\n      throw new Error('Section offsets must be ordered and non-overlapping.');\n    }\n    lastOffset = offset;\n\n    return {\n      generatedOffset: {\n        // The offset fields are 0-based, but we use 1-based indices when\n        // encoding/decoding from VLQ.\n        generatedLine: offsetLine + 1,\n        generatedColumn: offsetColumn + 1\n      },\n      consumer: new SourceMapConsumer(util.getArg(s, 'map'))\n    }\n  });\n}\n\nIndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nIndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nIndexedSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {\n  get: function () {\n    var 
sources = [];\n    for (var i = 0; i < this._sections.length; i++) {\n      for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {\n        sources.push(this._sections[i].consumer.sources[j]);\n      }\n    }\n    return sources;\n  }\n});\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. The only argument is an object\n * with the following properties:\n *\n *   - line: The line number in the generated source.\n *   - column: The column number in the generated source.\n *\n * and an object is returned with the following properties:\n *\n *   - source: The original source file, or null.\n *   - line: The line number in the original source, or null.\n *   - column: The column number in the original source, or null.\n *   - name: The original identifier, or null.\n */\nIndexedSourceMapConsumer.prototype.originalPositionFor =\n  function IndexedSourceMapConsumer_originalPositionFor(aArgs) {\n    var needle = {\n      generatedLine: util.getArg(aArgs, 'line'),\n      generatedColumn: util.getArg(aArgs, 'column')\n    };\n\n    // Find the section containing the generated position we're trying to map\n    // to an original position.\n    var sectionIndex = binarySearch.search(needle, this._sections,\n      function(needle, section) {\n        var cmp = needle.generatedLine - section.generatedOffset.generatedLine;\n        if (cmp) {\n          return cmp;\n        }\n\n        return (needle.generatedColumn -\n                section.generatedOffset.generatedColumn);\n      });\n    var section = this._sections[sectionIndex];\n\n    if (!section) {\n      return {\n        source: null,\n        line: null,\n        column: null,\n        name: null\n      };\n    }\n\n    return section.consumer.originalPositionFor({\n      line: needle.generatedLine -\n        (section.generatedOffset.generatedLine - 1),\n      column: needle.generatedColumn -\n        (section.generatedOffset.generatedLine === needle.generatedLine\n         ? section.generatedOffset.generatedColumn - 1\n         : 0),\n      bias: aArgs.bias\n    });\n  };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nIndexedSourceMapConsumer.prototype.hasContentsOfAllSources =\n  function IndexedSourceMapConsumer_hasContentsOfAllSources() {\n    return this._sections.every(function (s) {\n      return s.consumer.hasContentsOfAllSources();\n    });\n  };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nIndexedSourceMapConsumer.prototype.sourceContentFor =\n  function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n    for (var i = 0; i < this._sections.length; i++) {\n      var section = this._sections[i];\n\n      var content = section.consumer.sourceContentFor(aSource, true);\n      if (content) {\n        return content;\n      }\n    }\n    if (nullOnMissing) {\n      return null;\n    }\n    else {\n      throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n    }\n  };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. 
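A sketch of querying an indexed ("sections") map. The two sub-maps are generated on the fly so the snippet is self-contained; the offsets, sources and queried positions are illustrative.

var sourceMap = require('source-map');

function tinyMap(source, file) {
  var g = new sourceMap.SourceMapGenerator({ file: file });
  g.addMapping({ generated: { line: 1, column: 0 }, original: { line: 1, column: 0 }, source: source });
  return JSON.parse(g.toString());
}

var indexedMap = {
  version: 3,
  file: 'bundle.js',
  sections: [
    { offset: { line: 0,   column: 0 }, map: tinyMap('a.js', 'a.out.js') },
    { offset: { line: 100, column: 0 }, map: tinyMap('b.js', 'b.out.js') }
  ]
};

var consumer = new sourceMap.SourceMapConsumer(indexedMap); // dispatches to IndexedSourceMapConsumer
consumer.originalPositionFor({ line: 1,   column: 1 }); // -> { source: 'a.js', line: 1, column: 0, name: null }
consumer.originalPositionFor({ line: 101, column: 1 }); // -> { source: 'b.js', line: 1, column: 0, name: null }
consumer.sources;                                       // [ 'a.js', 'b.js' ]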
The only argument is an object with\n * the following properties:\n *\n *   - source: The filename of the original source.\n *   - line: The line number in the original source.\n *   - column: The column number in the original source.\n *\n * and an object is returned with the following properties:\n *\n *   - line: The line number in the generated source, or null.\n *   - column: The column number in the generated source, or null.\n */\nIndexedSourceMapConsumer.prototype.generatedPositionFor =\n  function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {\n    for (var i = 0; i < this._sections.length; i++) {\n      var section = this._sections[i];\n\n      // Only consider this section if the requested source is in the list of\n      // sources of the consumer.\n      if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) {\n        continue;\n      }\n      var generatedPosition = section.consumer.generatedPositionFor(aArgs);\n      if (generatedPosition) {\n        var ret = {\n          line: generatedPosition.line +\n            (section.generatedOffset.generatedLine - 1),\n          column: generatedPosition.column +\n            (section.generatedOffset.generatedLine === generatedPosition.line\n             ? section.generatedOffset.generatedColumn - 1\n             : 0)\n        };\n        return ret;\n      }\n    }\n\n    return {\n      line: null,\n      column: null\n    };\n  };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nIndexedSourceMapConsumer.prototype._parseMappings =\n  function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n    this.__generatedMappings = [];\n    this.__originalMappings = [];\n    for (var i = 0; i < this._sections.length; i++) {\n      var section = this._sections[i];\n      var sectionMappings = section.consumer._generatedMappings;\n      for (var j = 0; j < sectionMappings.length; j++) {\n        var mapping = sectionMappings[j];\n\n        var source = section.consumer._sources.at(mapping.source);\n        if (section.consumer.sourceRoot !== null) {\n          source = util.join(section.consumer.sourceRoot, source);\n        }\n        this._sources.add(source);\n        source = this._sources.indexOf(source);\n\n        var name = section.consumer._names.at(mapping.name);\n        this._names.add(name);\n        name = this._names.indexOf(name);\n\n        // The mappings coming from the consumer for the section have\n        // generated positions relative to the start of the section, so we\n        // need to offset them to be relative to the start of the concatenated\n        // generated file.\n        var adjustedMapping = {\n          source: source,\n          generatedLine: mapping.generatedLine +\n            (section.generatedOffset.generatedLine - 1),\n          generatedColumn: mapping.generatedColumn +\n            (section.generatedOffset.generatedLine === mapping.generatedLine\n            ? 
section.generatedOffset.generatedColumn - 1\n            : 0),\n          originalLine: mapping.originalLine,\n          originalColumn: mapping.originalColumn,\n          name: name\n        };\n\n        this.__generatedMappings.push(adjustedMapping);\n        if (typeof adjustedMapping.originalLine === 'number') {\n          this.__originalMappings.push(adjustedMapping);\n        }\n      }\n    }\n\n    quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated);\n    quickSort(this.__originalMappings, util.compareByOriginalPositions);\n  };\n\nexports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-map-consumer.js\n// module id = 7\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nexports.GREATEST_LOWER_BOUND = 1;\nexports.LEAST_UPPER_BOUND = 2;\n\n/**\n * Recursive implementation of binary search.\n *\n * @param aLow Indices here and lower do not contain the needle.\n * @param aHigh Indices here and higher do not contain the needle.\n * @param aNeedle The element being searched for.\n * @param aHaystack The non-empty array being searched.\n * @param aCompare Function which takes two elements and returns -1, 0, or 1.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n *     'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n */\nfunction recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {\n  // This function terminates when one of the following is true:\n  //\n  //   1. We find the exact element we are looking for.\n  //\n  //   2. We did not find the exact element, but we can return the index of\n  //      the next-closest element.\n  //\n  //   3. We did not find the exact element, and there is no next-closest\n  //      element than the one we are searching for, so we return -1.\n  var mid = Math.floor((aHigh - aLow) / 2) + aLow;\n  var cmp = aCompare(aNeedle, aHaystack[mid], true);\n  if (cmp === 0) {\n    // Found the element we are looking for.\n    return mid;\n  }\n  else if (cmp > 0) {\n    // Our needle is greater than aHaystack[mid].\n    if (aHigh - mid > 1) {\n      // The element is in the upper half.\n      return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);\n    }\n\n    // The exact needle element was not found in this haystack. Determine if\n    // we are in termination case (3) or (2) and return the appropriate thing.\n    if (aBias == exports.LEAST_UPPER_BOUND) {\n      return aHigh < aHaystack.length ? aHigh : -1;\n    } else {\n      return mid;\n    }\n  }\n  else {\n    // Our needle is less than aHaystack[mid].\n    if (mid - aLow > 1) {\n      // The element is in the lower half.\n      return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);\n    }\n\n    // we are in termination case (3) or (2) and return the appropriate thing.\n    if (aBias == exports.LEAST_UPPER_BOUND) {\n      return mid;\n    } else {\n      return aLow < 0 ? -1 : aLow;\n    }\n  }\n}\n\n/**\n * This is an implementation of binary search which will always try and return\n * the index of the closest element if there is no exact hit. 
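The recursive search above (and its search wrapper that follows) can be exercised directly. The deep require path into the package's lib/ folder is an assumption based on the module layout shown in this bundle.

var binarySearch = require('source-map/lib/binary-search');

var haystack = [2, 4, 6, 8];
var cmp = function (needle, element) { return needle - element; };

binarySearch.search(5, haystack, cmp);                                 // 1: index of 4 (greatest lower bound, the default)
binarySearch.search(5, haystack, cmp, binarySearch.LEAST_UPPER_BOUND); // 2: index of 6
binarySearch.search(6, haystack, cmp);                                 // 2: exact hit
binarySearch.search(1, haystack, cmp);                                 // -1: nothing at or below the needle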
This is because\n * mappings between original and generated line/col pairs are single points,\n * and there is an implicit region between each of them, so a miss just means\n * that you aren't on the very start of a region.\n *\n * @param aNeedle The element you are looking for.\n * @param aHaystack The array that is being searched.\n * @param aCompare A function which takes the needle and an element in the\n *     array and returns -1, 0, or 1 depending on whether the needle is less\n *     than, equal to, or greater than the element, respectively.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n *     'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n *     Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.\n */\nexports.search = function search(aNeedle, aHaystack, aCompare, aBias) {\n  if (aHaystack.length === 0) {\n    return -1;\n  }\n\n  var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,\n                              aCompare, aBias || exports.GREATEST_LOWER_BOUND);\n  if (index < 0) {\n    return -1;\n  }\n\n  // We have found either the exact element, or the next-closest element than\n  // the one we are searching for. However, there may be more than one such\n  // element. Make sure we always return the smallest of these.\n  while (index - 1 >= 0) {\n    if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {\n      break;\n    }\n    --index;\n  }\n\n  return index;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/binary-search.js\n// module id = 8\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n// It turns out that some (most?) JavaScript engines don't self-host\n// `Array.prototype.sort`. This makes sense because C++ will likely remain\n// faster than JS when doing raw CPU-intensive sorting. However, when using a\n// custom comparator function, calling back and forth between the VM's C++ and\n// JIT'd JS is rather slow *and* loses JIT type information, resulting in\n// worse generated code for the comparator function than would be optimal. In\n// fact, when sorting with a comparator, these costs outweigh the benefits of\n// sorting in C++. By using our own JS-implemented Quick Sort (below), we get\n// a ~3500ms mean speed-up in `bench/bench.html`.\n\n/**\n * Swap the elements indexed by `x` and `y` in the array `ary`.\n *\n * @param {Array} ary\n *        The array.\n * @param {Number} x\n *        The index of the first item.\n * @param {Number} y\n *        The index of the second item.\n */\nfunction swap(ary, x, y) {\n  var temp = ary[x];\n  ary[x] = ary[y];\n  ary[y] = temp;\n}\n\n/**\n * Returns a random integer within the range `low .. 
high` inclusive.\n *\n * @param {Number} low\n *        The lower bound on the range.\n * @param {Number} high\n *        The upper bound on the range.\n */\nfunction randomIntInRange(low, high) {\n  return Math.round(low + (Math.random() * (high - low)));\n}\n\n/**\n * The Quick Sort algorithm.\n *\n * @param {Array} ary\n *        An array to sort.\n * @param {function} comparator\n *        Function to use to compare two items.\n * @param {Number} p\n *        Start index of the array\n * @param {Number} r\n *        End index of the array\n */\nfunction doQuickSort(ary, comparator, p, r) {\n  // If our lower bound is less than our upper bound, we (1) partition the\n  // array into two pieces and (2) recurse on each half. If it is not, this is\n  // the empty array and our base case.\n\n  if (p < r) {\n    // (1) Partitioning.\n    //\n    // The partitioning chooses a pivot between `p` and `r` and moves all\n    // elements that are less than or equal to the pivot to the before it, and\n    // all the elements that are greater than it after it. The effect is that\n    // once partition is done, the pivot is in the exact place it will be when\n    // the array is put in sorted order, and it will not need to be moved\n    // again. This runs in O(n) time.\n\n    // Always choose a random pivot so that an input array which is reverse\n    // sorted does not cause O(n^2) running time.\n    var pivotIndex = randomIntInRange(p, r);\n    var i = p - 1;\n\n    swap(ary, pivotIndex, r);\n    var pivot = ary[r];\n\n    // Immediately after `j` is incremented in this loop, the following hold\n    // true:\n    //\n    //   * Every element in `ary[p .. i]` is less than or equal to the pivot.\n    //\n    //   * Every element in `ary[i+1 .. j-1]` is greater than the pivot.\n    for (var j = p; j < r; j++) {\n      if (comparator(ary[j], pivot) <= 0) {\n        i += 1;\n        swap(ary, i, j);\n      }\n    }\n\n    swap(ary, i + 1, j);\n    var q = i + 1;\n\n    // (2) Recurse on each half.\n\n    doQuickSort(ary, comparator, p, q - 1);\n    doQuickSort(ary, comparator, q + 1, r);\n  }\n}\n\n/**\n * Sort the given array in-place with the given comparator function.\n *\n * @param {Array} ary\n *        An array to sort.\n * @param {function} comparator\n *        Function to use to compare two items.\n */\nexports.quickSort = function (ary, comparator) {\n  doQuickSort(ary, comparator, 0, ary.length - 1);\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/quick-sort.js\n// module id = 9\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;\nvar util = require('./util');\n\n// Matches a Windows-style `\\r\\n` newline or a `\\n` newline used by all other\n// operating systems these days (capturing the result).\nvar REGEX_NEWLINE = /(\\r?\\n)/;\n\n// Newline character code for charCodeAt() comparisons\nvar NEWLINE_CODE = 10;\n\n// Private symbol for identifying `SourceNode`s when multiple versions of\n// the source-map library are loaded. 
This MUST NOT CHANGE across\n// versions!\nvar isSourceNode = \"$$$isSourceNode$$$\";\n\n/**\n * SourceNodes provide a way to abstract over interpolating/concatenating\n * snippets of generated JavaScript source code while maintaining the line and\n * column information associated with the original source code.\n *\n * @param aLine The original line number.\n * @param aColumn The original column number.\n * @param aSource The original source's filename.\n * @param aChunks Optional. An array of strings which are snippets of\n *        generated JS, or other SourceNodes.\n * @param aName The original identifier.\n */\nfunction SourceNode(aLine, aColumn, aSource, aChunks, aName) {\n  this.children = [];\n  this.sourceContents = {};\n  this.line = aLine == null ? null : aLine;\n  this.column = aColumn == null ? null : aColumn;\n  this.source = aSource == null ? null : aSource;\n  this.name = aName == null ? null : aName;\n  this[isSourceNode] = true;\n  if (aChunks != null) this.add(aChunks);\n}\n\n/**\n * Creates a SourceNode from generated code and a SourceMapConsumer.\n *\n * @param aGeneratedCode The generated code\n * @param aSourceMapConsumer The SourceMap for the generated code\n * @param aRelativePath Optional. The path that relative sources in the\n *        SourceMapConsumer should be relative to.\n */\nSourceNode.fromStringWithSourceMap =\n  function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {\n    // The SourceNode we want to fill with the generated code\n    // and the SourceMap\n    var node = new SourceNode();\n\n    // All even indices of this array are one line of the generated code,\n    // while all odd indices are the newlines between two adjacent lines\n    // (since `REGEX_NEWLINE` captures its match).\n    // Processed fragments are accessed by calling `shiftNextLine`.\n    var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);\n    var remainingLinesIndex = 0;\n    var shiftNextLine = function() {\n      var lineContents = getNextLine();\n      // The last line of a file might not have a newline.\n      var newLine = getNextLine() || \"\";\n      return lineContents + newLine;\n\n      function getNextLine() {\n        return remainingLinesIndex < remainingLines.length ?\n            remainingLines[remainingLinesIndex++] : undefined;\n      }\n    };\n\n    // We need to remember the position of \"remainingLines\"\n    var lastGeneratedLine = 1, lastGeneratedColumn = 0;\n\n    // The generate SourceNodes we need a code range.\n    // To extract it current and last mapping is used.\n    // Here we store the last mapping.\n    var lastMapping = null;\n\n    aSourceMapConsumer.eachMapping(function (mapping) {\n      if (lastMapping !== null) {\n        // We add the code from \"lastMapping\" to \"mapping\":\n        // First check if there is a new line in between.\n        if (lastGeneratedLine < mapping.generatedLine) {\n          // Associate first line with \"lastMapping\"\n          addMappingWithCode(lastMapping, shiftNextLine());\n          lastGeneratedLine++;\n          lastGeneratedColumn = 0;\n          // The remaining code is added without mapping\n        } else {\n          // There is no new line in between.\n          // Associate the code between \"lastGeneratedColumn\" and\n          // \"mapping.generatedColumn\" with \"lastMapping\"\n          var nextLine = remainingLines[remainingLinesIndex];\n          var code = nextLine.substr(0, mapping.generatedColumn -\n                                        
lastGeneratedColumn);\n          remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn -\n                                              lastGeneratedColumn);\n          lastGeneratedColumn = mapping.generatedColumn;\n          addMappingWithCode(lastMapping, code);\n          // No more remaining code, continue\n          lastMapping = mapping;\n          return;\n        }\n      }\n      // We add the generated code until the first mapping\n      // to the SourceNode without any mapping.\n      // Each line is added as separate string.\n      while (lastGeneratedLine < mapping.generatedLine) {\n        node.add(shiftNextLine());\n        lastGeneratedLine++;\n      }\n      if (lastGeneratedColumn < mapping.generatedColumn) {\n        var nextLine = remainingLines[remainingLinesIndex];\n        node.add(nextLine.substr(0, mapping.generatedColumn));\n        remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn);\n        lastGeneratedColumn = mapping.generatedColumn;\n      }\n      lastMapping = mapping;\n    }, this);\n    // We have processed all mappings.\n    if (remainingLinesIndex < remainingLines.length) {\n      if (lastMapping) {\n        // Associate the remaining code in the current line with \"lastMapping\"\n        addMappingWithCode(lastMapping, shiftNextLine());\n      }\n      // and add the remaining lines without any mapping\n      node.add(remainingLines.splice(remainingLinesIndex).join(\"\"));\n    }\n\n    // Copy sourcesContent into SourceNode\n    aSourceMapConsumer.sources.forEach(function (sourceFile) {\n      var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n      if (content != null) {\n        if (aRelativePath != null) {\n          sourceFile = util.join(aRelativePath, sourceFile);\n        }\n        node.setSourceContent(sourceFile, content);\n      }\n    });\n\n    return node;\n\n    function addMappingWithCode(mapping, code) {\n      if (mapping === null || mapping.source === undefined) {\n        node.add(code);\n      } else {\n        var source = aRelativePath\n          ? util.join(aRelativePath, mapping.source)\n          : mapping.source;\n        node.add(new SourceNode(mapping.originalLine,\n                                mapping.originalColumn,\n                                source,\n                                code,\n                                mapping.name));\n      }\n    }\n  };\n\n/**\n * Add a chunk of generated JS to this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n *        SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.add = function SourceNode_add(aChunk) {\n  if (Array.isArray(aChunk)) {\n    aChunk.forEach(function (chunk) {\n      this.add(chunk);\n    }, this);\n  }\n  else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n    if (aChunk) {\n      this.children.push(aChunk);\n    }\n  }\n  else {\n    throw new TypeError(\n      \"Expected a SourceNode, string, or an array of SourceNodes and strings. 
Got \" + aChunk\n    );\n  }\n  return this;\n};\n\n/**\n * Add a chunk of generated JS to the beginning of this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n *        SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {\n  if (Array.isArray(aChunk)) {\n    for (var i = aChunk.length-1; i >= 0; i--) {\n      this.prepend(aChunk[i]);\n    }\n  }\n  else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n    this.children.unshift(aChunk);\n  }\n  else {\n    throw new TypeError(\n      \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n    );\n  }\n  return this;\n};\n\n/**\n * Walk over the tree of JS snippets in this node and its children. The\n * walking function is called once for each snippet of JS and is passed that\n * snippet and the its original associated source's line/column location.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walk = function SourceNode_walk(aFn) {\n  var chunk;\n  for (var i = 0, len = this.children.length; i < len; i++) {\n    chunk = this.children[i];\n    if (chunk[isSourceNode]) {\n      chunk.walk(aFn);\n    }\n    else {\n      if (chunk !== '') {\n        aFn(chunk, { source: this.source,\n                     line: this.line,\n                     column: this.column,\n                     name: this.name });\n      }\n    }\n  }\n};\n\n/**\n * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between\n * each of `this.children`.\n *\n * @param aSep The separator.\n */\nSourceNode.prototype.join = function SourceNode_join(aSep) {\n  var newChildren;\n  var i;\n  var len = this.children.length;\n  if (len > 0) {\n    newChildren = [];\n    for (i = 0; i < len-1; i++) {\n      newChildren.push(this.children[i]);\n      newChildren.push(aSep);\n    }\n    newChildren.push(this.children[i]);\n    this.children = newChildren;\n  }\n  return this;\n};\n\n/**\n * Call String.prototype.replace on the very right-most source snippet. Useful\n * for trimming whitespace from the end of a source node, etc.\n *\n * @param aPattern The pattern to replace.\n * @param aReplacement The thing to replace the pattern with.\n */\nSourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {\n  var lastChild = this.children[this.children.length - 1];\n  if (lastChild[isSourceNode]) {\n    lastChild.replaceRight(aPattern, aReplacement);\n  }\n  else if (typeof lastChild === 'string') {\n    this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);\n  }\n  else {\n    this.children.push(''.replace(aPattern, aReplacement));\n  }\n  return this;\n};\n\n/**\n * Set the source content for a source file. This will be added to the SourceMapGenerator\n * in the sourcesContent field.\n *\n * @param aSourceFile The filename of the source file\n * @param aSourceContent The content of the source file\n */\nSourceNode.prototype.setSourceContent =\n  function SourceNode_setSourceContent(aSourceFile, aSourceContent) {\n    this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;\n  };\n\n/**\n * Walk over the tree of SourceNodes. 
The walking function is called for each\n * source file content and is passed the filename and source content.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walkSourceContents =\n  function SourceNode_walkSourceContents(aFn) {\n    for (var i = 0, len = this.children.length; i < len; i++) {\n      if (this.children[i][isSourceNode]) {\n        this.children[i].walkSourceContents(aFn);\n      }\n    }\n\n    var sources = Object.keys(this.sourceContents);\n    for (var i = 0, len = sources.length; i < len; i++) {\n      aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);\n    }\n  };\n\n/**\n * Return the string representation of this source node. Walks over the tree\n * and concatenates all the various snippets together to one string.\n */\nSourceNode.prototype.toString = function SourceNode_toString() {\n  var str = \"\";\n  this.walk(function (chunk) {\n    str += chunk;\n  });\n  return str;\n};\n\n/**\n * Returns the string representation of this source node along with a source\n * map.\n */\nSourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {\n  var generated = {\n    code: \"\",\n    line: 1,\n    column: 0\n  };\n  var map = new SourceMapGenerator(aArgs);\n  var sourceMappingActive = false;\n  var lastOriginalSource = null;\n  var lastOriginalLine = null;\n  var lastOriginalColumn = null;\n  var lastOriginalName = null;\n  this.walk(function (chunk, original) {\n    generated.code += chunk;\n    if (original.source !== null\n        && original.line !== null\n        && original.column !== null) {\n      if(lastOriginalSource !== original.source\n         || lastOriginalLine !== original.line\n         || lastOriginalColumn !== original.column\n         || lastOriginalName !== original.name) {\n        map.addMapping({\n          source: original.source,\n          original: {\n            line: original.line,\n            column: original.column\n          },\n          generated: {\n            line: generated.line,\n            column: generated.column\n          },\n          name: original.name\n        });\n      }\n      lastOriginalSource = original.source;\n      lastOriginalLine = original.line;\n      lastOriginalColumn = original.column;\n      lastOriginalName = original.name;\n      sourceMappingActive = true;\n    } else if (sourceMappingActive) {\n      map.addMapping({\n        generated: {\n          line: generated.line,\n          column: generated.column\n        }\n      });\n      lastOriginalSource = null;\n      sourceMappingActive = false;\n    }\n    for (var idx = 0, length = chunk.length; idx < length; idx++) {\n      if (chunk.charCodeAt(idx) === NEWLINE_CODE) {\n        generated.line++;\n        generated.column = 0;\n        // Mappings end at eol\n        if (idx + 1 === length) {\n          lastOriginalSource = null;\n          sourceMappingActive = false;\n        } else if (sourceMappingActive) {\n          map.addMapping({\n            source: original.source,\n            original: {\n              line: original.line,\n              column: original.column\n            },\n            generated: {\n              line: generated.line,\n              column: generated.column\n            },\n            name: original.name\n          });\n        }\n      } else {\n        generated.column++;\n      }\n    }\n  });\n  this.walkSourceContents(function (sourceFile, sourceContent) {\n    map.setSourceContent(sourceFile, sourceContent);\n  });\n\n  return { code: 
generated.code, map: map };\n};\n\nexports.SourceNode = SourceNode;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-node.js\n// module id = 10\n// module chunks = 0"],"sourceRoot":""} \ No newline at end of file diff --git a/node_modules/source-map/dist/source-map.js b/node_modules/source-map/dist/source-map.js new file mode 100644 index 00000000..4e630e29 --- /dev/null +++ b/node_modules/source-map/dist/source-map.js @@ -0,0 +1,3090 @@ +(function webpackUniversalModuleDefinition(root, factory) { + if(typeof exports === 'object' && typeof module === 'object') + module.exports = factory(); + else if(typeof define === 'function' && define.amd) + define([], factory); + else if(typeof exports === 'object') + exports["sourceMap"] = factory(); + else + root["sourceMap"] = factory(); +})(this, function() { +return /******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; + +/******/ // The require function +/******/ function __webpack_require__(moduleId) { + +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) +/******/ return installedModules[moduleId].exports; + +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ exports: {}, +/******/ id: moduleId, +/******/ loaded: false +/******/ }; + +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); + +/******/ // Flag the module as loaded +/******/ module.loaded = true; + +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } + + +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; + +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; + +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; + +/******/ // Load entry module and return exports +/******/ return __webpack_require__(0); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ (function(module, exports, __webpack_require__) { + + /* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ + exports.SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + exports.SourceMapConsumer = __webpack_require__(7).SourceMapConsumer; + exports.SourceNode = __webpack_require__(10).SourceNode; + + +/***/ }), +/* 1 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var base64VLQ = __webpack_require__(2); + var util = __webpack_require__(4); + var ArraySet = __webpack_require__(5).ArraySet; + var MappingList = __webpack_require__(6).MappingList; + + /** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. 
+ */ + function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; + } + + SourceMapGenerator.prototype._version = 3; + + /** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. + */ + SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + }); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + + /** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). + * - name: An optional original token name for this mapping. + */ + SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + + /** + * Set the source content for a source file. + */ + SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. 
+ if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. + delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + + /** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ + SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. + var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. 
+ aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + + /** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. + * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ + SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + // When aOriginal is truthy but has empty values for .line and .column, + // it is most likely a programmer error. In this case we throw a very + // specific error message to try to guide them the right way. + // For example: https://github.com/Polymer/polymer-bundler/pull/519 + if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') { + throw new Error( + 'original.line and original.column are not numbers -- you probably meant to omit ' + + 'the original mapping entirely and only map the generated position. If so, pass ' + + 'null for the original mapping instead of an object with empty or null values.' + ); + } + + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. + return; + } + else { + throw new Error('Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + }; + + /** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. 
+ */ + SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + + SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + + /** + * Externalize the source map. + */ + SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + + /** + * Render the source map being generated to a string. + */ + SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + + exports.SourceMapGenerator = SourceMapGenerator; + + +/***/ }), +/* 2 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. 
+ * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + + var base64 = __webpack_require__(3); + + // A single base 64 digit can contain 6 bits of data. For the base 64 variable + // length quantities we use in the source map spec, the first bit is the sign, + // the next four bits are the actual value, and the 6th bit is the + // continuation bit. The continuation bit tells us whether there are more + // digits in this value following this digit. + // + // Continuation + // | Sign + // | | + // V V + // 101011 + + var VLQ_BASE_SHIFT = 5; + + // binary: 100000 + var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + + // binary: 011111 + var VLQ_BASE_MASK = VLQ_BASE - 1; + + // binary: 100000 + var VLQ_CONTINUATION_BIT = VLQ_BASE; + + /** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ + function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; + } + + /** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ + function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; + } + + /** + * Returns the base 64 VLQ encoded value. + */ + exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. 
+ digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; + }; + + /** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. + */ + exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; + }; + + +/***/ }), +/* 3 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + + /** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ + exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); + }; + + /** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. + */ + exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. + return -1; + }; + + +/***/ }), +/* 4 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + /** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. 
+ */ + function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } + } + exports.getArg = getArg; + + var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/; + var dataUrlRegexp = /^data:.+\,.+$/; + + function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; + } + exports.urlParse = urlParse; + + function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; + } + exports.urlGenerate = urlGenerate; + + /** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consecutive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ + function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + + var parts = path.split(/\/+/); + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; + } + exports.normalize = normalize; + + /** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. 
+ */ + function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; + } + exports.join = join; + + exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp); + }; + + /** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ + function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); + } + exports.relative = relative; + + var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); + }()); + + function identity (s) { + return s; + } + + /** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. + * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ + function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; + } + exports.toSetString = supportsNullProto ? identity : toSetString; + + function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; + } + exports.fromSetString = supportsNullProto ? 
identity : fromSetString; + + function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; + } + + /** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ + function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; + } + exports.compareByOriginalPositions = compareByOriginalPositions; + + /** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. + */ + function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; + } + exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + + function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; + } + + /** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. 
+ */ + function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); + } + exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; + + +/***/ }), +/* 5 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var has = Object.prototype.hasOwnProperty; + var hasNativeMap = typeof Map !== "undefined"; + + /** + * A data structure which is a combination of an array and a set. Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ + function ArraySet() { + this._array = []; + this._set = hasNativeMap ? new Map() : Object.create(null); + } + + /** + * Static method for creating ArraySet instances from an existing array. + */ + ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; + }; + + /** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ + ArraySet.prototype.size = function ArraySet_size() { + return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length; + }; + + /** + * Add the given string to this set. + * + * @param String aStr + */ + ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = hasNativeMap ? aStr : util.toSetString(aStr); + var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + if (hasNativeMap) { + this._set.set(aStr, idx); + } else { + this._set[sStr] = idx; + } + } + }; + + /** + * Is the given string a member of this set? + * + * @param String aStr + */ + ArraySet.prototype.has = function ArraySet_has(aStr) { + if (hasNativeMap) { + return this._set.has(aStr); + } else { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); + } + }; + + /** + * What is the index of the given string in the array? + * + * @param String aStr + */ + ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + if (hasNativeMap) { + var idx = this._set.get(aStr); + if (idx >= 0) { + return idx; + } + } else { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + } + + throw new Error('"' + aStr + '" is not in the set.'); + }; + + /** + * What is the element at the given index? 
+ * + * @param Number aIdx + */ + ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); + }; + + /** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ + ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); + }; + + exports.ArraySet = ArraySet; + + +/***/ }), +/* 6 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + + /** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ + function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; + } + + /** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ + function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; + } + + /** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ + MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + + /** + * Add the given source mapping. + * + * @param Object aMapping + */ + MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } + }; + + /** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. + */ + MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; + }; + + exports.MappingList = MappingList; + + +/***/ }), +/* 7 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var binarySearch = __webpack_require__(8); + var ArraySet = __webpack_require__(5).ArraySet; + var base64VLQ = __webpack_require__(2); + var quickSort = __webpack_require__(9).quickSort; + + function SourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + return sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap) + : new BasicSourceMapConsumer(sourceMap); + } + + SourceMapConsumer.fromSourceMap = function(aSourceMap) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap); + } + + /** + * The version of the source mapping spec that we are consuming. + */ + SourceMapConsumer.prototype._version = 3; + + // `__generatedMappings` and `__originalMappings` are arrays that hold the + // parsed mapping coordinates from the source map's "mappings" attribute. They + // are lazily instantiated, accessed via the `_generatedMappings` and + // `_originalMappings` getters respectively, and we only parse the mappings + // and create these arrays once queried for a source location. We jump through + // these hoops because there can be many thousands of mappings, and parsing + // them is expensive, so we only want to do it if we must. + // + // Each object in the arrays is of the form: + // + // { + // generatedLine: The line number in the generated code, + // generatedColumn: The column number in the generated code, + // source: The path to the original source file that generated this + // chunk of code, + // originalLine: The line number in the original source that + // corresponds to this chunk of generated code, + // originalColumn: The column number in the original source that + // corresponds to this chunk of generated code, + // name: The name of the original symbol which generated this chunk of + // code. + // } + // + // All properties except for `generatedLine` and `generatedColumn` can be + // `null`. + // + // `_generatedMappings` is ordered by the generated positions. + // + // `_originalMappings` is ordered by the original positions. + + SourceMapConsumer.prototype.__generatedMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } + }); + + SourceMapConsumer.prototype.__originalMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } + }); + + SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
+ */ + SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + + SourceMapConsumer.GENERATED_ORDER = 1; + SourceMapConsumer.ORIGINAL_ORDER = 2; + + SourceMapConsumer.GREATEST_LOWER_BOUND = 1; + SourceMapConsumer.LEAST_UPPER_BOUND = 2; + + /** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. + */ + SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + mappings.map(function (mapping) { + var source = mapping.source === null ? null : this._sources.at(mapping.source); + if (source != null && sourceRoot != null) { + source = util.join(sourceRoot, source); + } + return { + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : this._names.at(mapping.name) + }; + }, this).forEach(aCallback, context); + }; + + /** + * Returns all generated line and column information for the original source, + * line, and column provided. If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: Optional. the column number in the original source. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ + SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. 
+ var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + if (this.sourceRoot != null) { + needle.source = util.relative(this.sourceRoot, needle.source); + } + if (!this._sources.has(needle.source)) { + return []; + } + needle.source = this._sources.indexOf(needle.source); + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. + while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + + exports.SourceMapConsumer = SourceMapConsumer; + + /** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The only parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. 
+ * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ + function BasicSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this.file = file; + } + + BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + + /** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. 
+ * @returns BasicSourceMapConsumer + */ + BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + + /** + * The version of the source mapping spec that we are consuming. + */ + BasicSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._sources.toArray().map(function (s) { + return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s; + }, this); + } + }); + + /** + * Provide the JIT with a nice shape / hidden class. + */ + function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; + } + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + // Because each offset is encoded relative to the previous one, + // many segments often have the same encoding. 
We can exploit this + // fact by caching the parsed variable length fields of each segment, + // allowing us to avoid a second parse if we encounter the same + // segment again. + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = cachedSegments[str]; + if (segment) { + index += str.length; + } else { + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + cachedSegments[str] = segment; + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. + mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + originalMappings.push(mapping); + } + } + } + + quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); + this.__generatedMappings = generatedMappings; + + quickSort(originalMappings, util.compareByOriginalPositions); + this.__originalMappings = originalMappings; + }; + + /** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ + BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. + + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + + /** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ + BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated column. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts).
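+        //
+        // A worked example with assumed numbers: if this mapping starts at
+        // generated column 5 and the next mapping on the same generated line
+        // starts at column 12, this mapping is treated as covering columns
+        // 5..11, i.e. lastGeneratedColumn = nextMapping.generatedColumn - 1.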
+ if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. + */ + BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + if (this.sourceRoot != null) { + source = util.join(this.sourceRoot, source); + } + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + + /** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. 
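+   *
+   * A minimal usage sketch (the require call, `rawSourceMap` and the
+   * 'foo.js' name are illustrative assumptions, not taken from this file):
+   *
+   *   var SourceMapConsumer = require('source-map').SourceMapConsumer;
+   *   var consumer = new SourceMapConsumer(rawSourceMap);
+   *   // Passing true as the second argument returns null instead of
+   *   // throwing when the requested source is not in the map.
+   *   var content = consumer.sourceContentFor('foo.js', true);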
+ */ + BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + if (this.sourceRoot != null) { + aSource = util.relative(this.sourceRoot, aSource); + } + + if (this._sources.has(aSource)) { + return this.sourcesContent[this._sources.indexOf(aSource)]; + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = aSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + aSource)) { + return this.sourcesContent[this._sources.indexOf("/" + aSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: The column number in the original source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. 
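+   *
+   * For example (a sketch with placeholder values; `rawSourceMap`, 'foo.js'
+   * and the positions are assumptions):
+   *
+   *   var consumer = new SourceMapConsumer(rawSourceMap);
+   *   var pos = consumer.generatedPositionFor({
+   *     source: 'foo.js',  // original file name as listed in the map
+   *     line: 2,           // 1-based line in the original source
+   *     column: 0          // 0-based column in the original source
+   *   });
+   *   // pos has the shape { line, column, lastColumn }; lastColumn stays
+   *   // null unless computeColumnSpans() has been called first.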
+ */ + BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + if (this.sourceRoot != null) { + source = util.relative(this.sourceRoot, source); + } + if (!this._sources.has(source)) { + return { + line: null, + column: null, + lastColumn: null + }; + } + source = this._sources.indexOf(source); + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + + exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + + /** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The only parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. + * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ + function IndexedSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. 
+ // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map')) + } + }); + } + + IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + + /** + * The version of the source mapping spec that we are consuming. + */ + IndexedSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } + }); + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. + */ + IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. + var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + + /** + * Returns the original source content. 
The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ + IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: The column number in the original source. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ + IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + if (section.consumer.sourceRoot !== null) { + source = util.join(section.consumer.sourceRoot, source); + } + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? 
section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + + exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; + + +/***/ }), +/* 8 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + exports.GREATEST_LOWER_BOUND = 1; + exports.LEAST_UPPER_BOUND = 2; + + /** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ + function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. + return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } + } + + /** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. 
+ * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ + exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. + while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; + }; + + +/***/ }), +/* 9 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + // It turns out that some (most?) JavaScript engines don't self-host + // `Array.prototype.sort`. This makes sense because C++ will likely remain + // faster than JS when doing raw CPU-intensive sorting. However, when using a + // custom comparator function, calling back and forth between the VM's C++ and + // JIT'd JS is rather slow *and* loses JIT type information, resulting in + // worse generated code for the comparator function than would be optimal. In + // fact, when sorting with a comparator, these costs outweigh the benefits of + // sorting in C++. By using our own JS-implemented Quick Sort (below), we get + // a ~3500ms mean speed-up in `bench/bench.html`. + + /** + * Swap the elements indexed by `x` and `y` in the array `ary`. + * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. + */ + function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; + } + + /** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ + function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); + } + + /** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ + function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. 
+ // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. + for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } + } + + /** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + */ + exports.quickSort = function (ary, comparator) { + doQuickSort(ary, comparator, 0, ary.length - 1); + }; + + +/***/ }), +/* 10 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + var util = __webpack_require__(4); + + // Matches a Windows-style `\r\n` newline or a `\n` newline used by all other + // operating systems these days (capturing the result). + var REGEX_NEWLINE = /(\r?\n)/; + + // Newline character code for charCodeAt() comparisons + var NEWLINE_CODE = 10; + + // Private symbol for identifying `SourceNode`s when multiple versions of + // the source-map library are loaded. This MUST NOT CHANGE across + // versions! + var isSourceNode = "$$$isSourceNode$$$"; + + /** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ + function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); + } + + /** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. 
The path that relative sources in the + * SourceMapConsumer should be relative to. + */ + SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are accessed by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var remainingLinesIndex = 0; + var shiftNextLine = function() { + var lineContents = getNextLine(); + // The last line of a file might not have a newline. + var newLine = getNextLine() || ""; + return lineContents + newLine; + + function getNextLine() { + return remainingLinesIndex < remainingLines.length ? + remainingLines[remainingLinesIndex++] : undefined; + } + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // To generate the SourceNodes we need a code range. + // To extract it, the current and the last mapping are used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. + if (lastGeneratedLine < mapping.generatedLine) { + // Associate the first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. + // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[remainingLinesIndex]; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as a separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[remainingLinesIndex]; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings.
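+      // What remains below: flush any generated code that follows the last
+      // mapping, copy the consumer's sourcesContent into the new node, and
+      // return the assembled SourceNode.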
+ if (remainingLinesIndex < remainingLines.length) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.splice(remainingLinesIndex).join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + + /** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + }; + + /** + * Add a chunk of generated JS to the beginning of this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + }; + + /** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } + }; + + /** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. 
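+   *
+   * For instance (an illustrative sketch; the chunk values are arbitrary):
+   *
+   *   var node = new SourceNode(null, null, null, ['a', 'b', 'c']);
+   *   node.join(', ');
+   *   node.toString();  // => "a, b, c"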
+ */ + SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; + }; + + /** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ + SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; + }; + + /** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ + SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + + /** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + + /** + * Return the string representation of this source node. Walks over the tree + * and concatenates all the various snippets together to one string. + */ + SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; + }; + + /** + * Returns the string representation of this source node along with a source + * map. 
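+   *
+   * A small round-trip sketch (the file names and the snippet are assumed
+   * placeholders):
+   *
+   *   var node = new SourceNode(1, 0, 'foo.js', 'console.log("hi");\n');
+   *   var result = node.toStringWithSourceMap({ file: 'out.js' });
+   *   // result.code is the concatenated string; result.map is a
+   *   // SourceMapGenerator, so result.map.toString() yields the JSON map.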
+ */ + SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; + }; + + exports.SourceNode = SourceNode; + + +/***/ }) +/******/ ]) +}); +; \ No newline at end of file diff --git a/node_modules/source-map/dist/source-map.min.js b/node_modules/source-map/dist/source-map.min.js new file mode 100644 index 00000000..f2a46bd0 --- /dev/null +++ b/node_modules/source-map/dist/source-map.min.js @@ -0,0 +1,2 @@ +!function(e,n){"object"==typeof exports&&"object"==typeof module?module.exports=n():"function"==typeof define&&define.amd?define([],n):"object"==typeof exports?exports.sourceMap=n():e.sourceMap=n()}(this,function(){return function(e){function n(t){if(r[t])return r[t].exports;var o=r[t]={exports:{},id:t,loaded:!1};return e[t].call(o.exports,o,o.exports,n),o.loaded=!0,o.exports}var r={};return n.m=e,n.c=r,n.p="",n(0)}([function(e,n,r){n.SourceMapGenerator=r(1).SourceMapGenerator,n.SourceMapConsumer=r(7).SourceMapConsumer,n.SourceNode=r(10).SourceNode},function(e,n,r){function t(e){e||(e={}),this._file=i.getArg(e,"file",null),this._sourceRoot=i.getArg(e,"sourceRoot",null),this._skipValidation=i.getArg(e,"skipValidation",!1),this._sources=new s,this._names=new s,this._mappings=new a,this._sourcesContents=null}var o=r(2),i=r(4),s=r(5).ArraySet,a=r(6).MappingList;t.prototype._version=3,t.fromSourceMap=function(e){var n=e.sourceRoot,r=new t({file:e.file,sourceRoot:n});return e.eachMapping(function(e){var 
t={generated:{line:e.generatedLine,column:e.generatedColumn}};null!=e.source&&(t.source=e.source,null!=n&&(t.source=i.relative(n,t.source)),t.original={line:e.originalLine,column:e.originalColumn},null!=e.name&&(t.name=e.name)),r.addMapping(t)}),e.sources.forEach(function(n){var t=e.sourceContentFor(n);null!=t&&r.setSourceContent(n,t)}),r},t.prototype.addMapping=function(e){var n=i.getArg(e,"generated"),r=i.getArg(e,"original",null),t=i.getArg(e,"source",null),o=i.getArg(e,"name",null);this._skipValidation||this._validateMapping(n,r,t,o),null!=t&&(t=String(t),this._sources.has(t)||this._sources.add(t)),null!=o&&(o=String(o),this._names.has(o)||this._names.add(o)),this._mappings.add({generatedLine:n.line,generatedColumn:n.column,originalLine:null!=r&&r.line,originalColumn:null!=r&&r.column,source:t,name:o})},t.prototype.setSourceContent=function(e,n){var r=e;null!=this._sourceRoot&&(r=i.relative(this._sourceRoot,r)),null!=n?(this._sourcesContents||(this._sourcesContents=Object.create(null)),this._sourcesContents[i.toSetString(r)]=n):this._sourcesContents&&(delete this._sourcesContents[i.toSetString(r)],0===Object.keys(this._sourcesContents).length&&(this._sourcesContents=null))},t.prototype.applySourceMap=function(e,n,r){var t=n;if(null==n){if(null==e.file)throw new Error('SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, or the source map\'s "file" property. Both were omitted.');t=e.file}var o=this._sourceRoot;null!=o&&(t=i.relative(o,t));var a=new s,u=new s;this._mappings.unsortedForEach(function(n){if(n.source===t&&null!=n.originalLine){var s=e.originalPositionFor({line:n.originalLine,column:n.originalColumn});null!=s.source&&(n.source=s.source,null!=r&&(n.source=i.join(r,n.source)),null!=o&&(n.source=i.relative(o,n.source)),n.originalLine=s.line,n.originalColumn=s.column,null!=s.name&&(n.name=s.name))}var l=n.source;null==l||a.has(l)||a.add(l);var c=n.name;null==c||u.has(c)||u.add(c)},this),this._sources=a,this._names=u,e.sources.forEach(function(n){var t=e.sourceContentFor(n);null!=t&&(null!=r&&(n=i.join(r,n)),null!=o&&(n=i.relative(o,n)),this.setSourceContent(n,t))},this)},t.prototype._validateMapping=function(e,n,r,t){if(n&&"number"!=typeof n.line&&"number"!=typeof n.column)throw new Error("original.line and original.column are not numbers -- you probably meant to omit the original mapping entirely and only map the generated position. 
If so, pass null for the original mapping instead of an object with empty or null values.");if((!(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0)||n||r||t)&&!(e&&"line"in e&&"column"in e&&n&&"line"in n&&"column"in n&&e.line>0&&e.column>=0&&n.line>0&&n.column>=0&&r))throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:r,original:n,name:t}))},t.prototype._serializeMappings=function(){for(var e,n,r,t,s=0,a=1,u=0,l=0,c=0,g=0,p="",h=this._mappings.toArray(),f=0,d=h.length;f0){if(!i.compareByGeneratedPositionsInflated(n,h[f-1]))continue;e+=","}e+=o.encode(n.generatedColumn-s),s=n.generatedColumn,null!=n.source&&(t=this._sources.indexOf(n.source),e+=o.encode(t-g),g=t,e+=o.encode(n.originalLine-1-l),l=n.originalLine-1,e+=o.encode(n.originalColumn-u),u=n.originalColumn,null!=n.name&&(r=this._names.indexOf(n.name),e+=o.encode(r-c),c=r)),p+=e}return p},t.prototype._generateSourcesContent=function(e,n){return e.map(function(e){if(!this._sourcesContents)return null;null!=n&&(e=i.relative(n,e));var r=i.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,r)?this._sourcesContents[r]:null},this)},t.prototype.toJSON=function(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};return null!=this._file&&(e.file=this._file),null!=this._sourceRoot&&(e.sourceRoot=this._sourceRoot),this._sourcesContents&&(e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)),e},t.prototype.toString=function(){return JSON.stringify(this.toJSON())},n.SourceMapGenerator=t},function(e,n,r){function t(e){return e<0?(-e<<1)+1:(e<<1)+0}function o(e){var n=1===(1&e),r=e>>1;return n?-r:r}var i=r(3),s=5,a=1<>>=s,o>0&&(n|=l),r+=i.encode(n);while(o>0);return r},n.decode=function(e,n,r){var t,a,c=e.length,g=0,p=0;do{if(n>=c)throw new Error("Expected more digits in base 64 VLQ value.");if(a=i.decode(e.charCodeAt(n++)),a===-1)throw new Error("Invalid base64 digit: "+e.charAt(n-1));t=!!(a&l),a&=u,g+=a<=0;c--)s=u[c],"."===s?u.splice(c,1):".."===s?l++:l>0&&(""===s?(u.splice(c+1,l),l=0):(u.splice(c,2),l--));return r=u.join("/"),""===r&&(r=a?"/":"."),i?(i.path=r,o(i)):r}function s(e,n){""===e&&(e="."),""===n&&(n=".");var r=t(n),s=t(e);if(s&&(e=s.path||"/"),r&&!r.scheme)return s&&(r.scheme=s.scheme),o(r);if(r||n.match(_))return n;if(s&&!s.host&&!s.path)return s.host=n,o(s);var a="/"===n.charAt(0)?n:i(e.replace(/\/+$/,"")+"/"+n);return s?(s.path=a,o(s)):a}function a(e,n){""===e&&(e="."),e=e.replace(/\/$/,"");for(var r=0;0!==n.indexOf(e+"/");){var t=e.lastIndexOf("/");if(t<0)return n;if(e=e.slice(0,t),e.match(/^([^\/]+:\/)?\/*$/))return n;++r}return Array(r+1).join("../")+n.substr(e.length+1)}function u(e){return e}function l(e){return g(e)?"$"+e:e}function c(e){return g(e)?e.slice(1):e}function g(e){if(!e)return!1;var n=e.length;if(n<9)return!1;if(95!==e.charCodeAt(n-1)||95!==e.charCodeAt(n-2)||111!==e.charCodeAt(n-3)||116!==e.charCodeAt(n-4)||111!==e.charCodeAt(n-5)||114!==e.charCodeAt(n-6)||112!==e.charCodeAt(n-7)||95!==e.charCodeAt(n-8)||95!==e.charCodeAt(n-9))return!1;for(var r=n-10;r>=0;r--)if(36!==e.charCodeAt(r))return!1;return!0}function p(e,n,r){var t=e.source-n.source;return 0!==t?t:(t=e.originalLine-n.originalLine,0!==t?t:(t=e.originalColumn-n.originalColumn,0!==t||r?t:(t=e.generatedColumn-n.generatedColumn,0!==t?t:(t=e.generatedLine-n.generatedLine,0!==t?t:e.name-n.name))))}function h(e,n,r){var t=e.generatedLine-n.generatedLine;return 
0!==t?t:(t=e.generatedColumn-n.generatedColumn,0!==t||r?t:(t=e.source-n.source,0!==t?t:(t=e.originalLine-n.originalLine,0!==t?t:(t=e.originalColumn-n.originalColumn,0!==t?t:e.name-n.name))))}function f(e,n){return e===n?0:e>n?1:-1}function d(e,n){var r=e.generatedLine-n.generatedLine;return 0!==r?r:(r=e.generatedColumn-n.generatedColumn,0!==r?r:(r=f(e.source,n.source),0!==r?r:(r=e.originalLine-n.originalLine,0!==r?r:(r=e.originalColumn-n.originalColumn,0!==r?r:f(e.name,n.name)))))}n.getArg=r;var m=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/,_=/^data:.+\,.+$/;n.urlParse=t,n.urlGenerate=o,n.normalize=i,n.join=s,n.isAbsolute=function(e){return"/"===e.charAt(0)||!!e.match(m)},n.relative=a;var v=function(){var e=Object.create(null);return!("__proto__"in e)}();n.toSetString=v?u:l,n.fromSetString=v?u:c,n.compareByOriginalPositions=p,n.compareByGeneratedPositionsDeflated=h,n.compareByGeneratedPositionsInflated=d},function(e,n,r){function t(){this._array=[],this._set=s?new Map:Object.create(null)}var o=r(4),i=Object.prototype.hasOwnProperty,s="undefined"!=typeof Map;t.fromArray=function(e,n){for(var r=new t,o=0,i=e.length;o=0)return n}else{var r=o.toSetString(e);if(i.call(this._set,r))return this._set[r]}throw new Error('"'+e+'" is not in the set.')},t.prototype.at=function(e){if(e>=0&&er||t==r&&s>=o||i.compareByGeneratedPositionsInflated(e,n)<=0}function o(){this._array=[],this._sorted=!0,this._last={generatedLine:-1,generatedColumn:0}}var i=r(4);o.prototype.unsortedForEach=function(e,n){this._array.forEach(e,n)},o.prototype.add=function(e){t(this._last,e)?(this._last=e,this._array.push(e)):(this._sorted=!1,this._array.push(e))},o.prototype.toArray=function(){return this._sorted||(this._array.sort(i.compareByGeneratedPositionsInflated),this._sorted=!0),this._array},n.MappingList=o},function(e,n,r){function t(e){var n=e;return"string"==typeof e&&(n=JSON.parse(e.replace(/^\)\]\}'/,""))),null!=n.sections?new s(n):new o(n)}function o(e){var n=e;"string"==typeof e&&(n=JSON.parse(e.replace(/^\)\]\}'/,"")));var r=a.getArg(n,"version"),t=a.getArg(n,"sources"),o=a.getArg(n,"names",[]),i=a.getArg(n,"sourceRoot",null),s=a.getArg(n,"sourcesContent",null),u=a.getArg(n,"mappings"),c=a.getArg(n,"file",null);if(r!=this._version)throw new Error("Unsupported version: "+r);t=t.map(String).map(a.normalize).map(function(e){return i&&a.isAbsolute(i)&&a.isAbsolute(e)?a.relative(i,e):e}),this._names=l.fromArray(o.map(String),!0),this._sources=l.fromArray(t,!0),this.sourceRoot=i,this.sourcesContent=s,this._mappings=u,this.file=c}function i(){this.generatedLine=0,this.generatedColumn=0,this.source=null,this.originalLine=null,this.originalColumn=null,this.name=null}function s(e){var n=e;"string"==typeof e&&(n=JSON.parse(e.replace(/^\)\]\}'/,"")));var r=a.getArg(n,"version"),o=a.getArg(n,"sections");if(r!=this._version)throw new Error("Unsupported version: "+r);this._sources=new l,this._names=new l;var i={line:-1,column:0};this._sections=o.map(function(e){if(e.url)throw new Error("Support for url field in sections not implemented.");var n=a.getArg(e,"offset"),r=a.getArg(n,"line"),o=a.getArg(n,"column");if(r=0){var i=this._originalMappings[o];if(void 0===e.column)for(var s=i.originalLine;i&&i.originalLine===s;)t.push({line:a.getArg(i,"generatedLine",null),column:a.getArg(i,"generatedColumn",null),lastColumn:a.getArg(i,"lastGeneratedColumn",null)}),i=this._originalMappings[++o];else for(var 
l=i.originalColumn;i&&i.originalLine===n&&i.originalColumn==l;)t.push({line:a.getArg(i,"generatedLine",null),column:a.getArg(i,"generatedColumn",null),lastColumn:a.getArg(i,"lastGeneratedColumn",null)}),i=this._originalMappings[++o]}return t},n.SourceMapConsumer=t,o.prototype=Object.create(t.prototype),o.prototype.consumer=t,o.fromSourceMap=function(e){var n=Object.create(o.prototype),r=n._names=l.fromArray(e._names.toArray(),!0),t=n._sources=l.fromArray(e._sources.toArray(),!0);n.sourceRoot=e._sourceRoot,n.sourcesContent=e._generateSourcesContent(n._sources.toArray(),n.sourceRoot),n.file=e._file;for(var s=e._mappings.toArray().slice(),u=n.__generatedMappings=[],c=n.__originalMappings=[],p=0,h=s.length;p1&&(r.source=d+o[1],d+=o[1],r.originalLine=h+o[2],h=r.originalLine,r.originalLine+=1,r.originalColumn=f+o[3],f=r.originalColumn,o.length>4&&(r.name=m+o[4],m+=o[4])),S.push(r),"number"==typeof r.originalLine&&A.push(r)}g(S,a.compareByGeneratedPositionsDeflated),this.__generatedMappings=S,g(A,a.compareByOriginalPositions),this.__originalMappings=A},o.prototype._findMapping=function(e,n,r,t,o,i){if(e[r]<=0)throw new TypeError("Line must be greater than or equal to 1, got "+e[r]);if(e[t]<0)throw new TypeError("Column must be greater than or equal to 0, got "+e[t]);return u.search(e,n,o,i)},o.prototype.computeColumnSpans=function(){for(var e=0;e=0){var o=this._generatedMappings[r];if(o.generatedLine===n.generatedLine){var i=a.getArg(o,"source",null);null!==i&&(i=this._sources.at(i),null!=this.sourceRoot&&(i=a.join(this.sourceRoot,i)));var s=a.getArg(o,"name",null);return null!==s&&(s=this._names.at(s)),{source:i,line:a.getArg(o,"originalLine",null),column:a.getArg(o,"originalColumn",null),name:s}}}return{source:null,line:null,column:null,name:null}},o.prototype.hasContentsOfAllSources=function(){return!!this.sourcesContent&&(this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some(function(e){return null==e}))},o.prototype.sourceContentFor=function(e,n){if(!this.sourcesContent)return null;if(null!=this.sourceRoot&&(e=a.relative(this.sourceRoot,e)),this._sources.has(e))return this.sourcesContent[this._sources.indexOf(e)];var r;if(null!=this.sourceRoot&&(r=a.urlParse(this.sourceRoot))){var t=e.replace(/^file:\/\//,"");if("file"==r.scheme&&this._sources.has(t))return this.sourcesContent[this._sources.indexOf(t)];if((!r.path||"/"==r.path)&&this._sources.has("/"+e))return this.sourcesContent[this._sources.indexOf("/"+e)]}if(n)return null;throw new Error('"'+e+'" is not in the SourceMap.')},o.prototype.generatedPositionFor=function(e){var n=a.getArg(e,"source");if(null!=this.sourceRoot&&(n=a.relative(this.sourceRoot,n)),!this._sources.has(n))return{line:null,column:null,lastColumn:null};n=this._sources.indexOf(n);var r={source:n,originalLine:a.getArg(e,"line"),originalColumn:a.getArg(e,"column")},o=this._findMapping(r,this._originalMappings,"originalLine","originalColumn",a.compareByOriginalPositions,a.getArg(e,"bias",t.GREATEST_LOWER_BOUND));if(o>=0){var i=this._originalMappings[o];if(i.source===r.source)return{line:a.getArg(i,"generatedLine",null),column:a.getArg(i,"generatedColumn",null),lastColumn:a.getArg(i,"lastGeneratedColumn",null)}}return{line:null,column:null,lastColumn:null}},n.BasicSourceMapConsumer=o,s.prototype=Object.create(t.prototype),s.prototype.constructor=t,s.prototype._version=3,Object.defineProperty(s.prototype,"sources",{get:function(){for(var 
e=[],n=0;n0?t-u>1?r(u,t,o,i,s,a):a==n.LEAST_UPPER_BOUND?t1?r(e,u,o,i,s,a):a==n.LEAST_UPPER_BOUND?u:e<0?-1:e}n.GREATEST_LOWER_BOUND=1,n.LEAST_UPPER_BOUND=2,n.search=function(e,t,o,i){if(0===t.length)return-1;var s=r(-1,t.length,e,t,o,i||n.GREATEST_LOWER_BOUND);if(s<0)return-1;for(;s-1>=0&&0===o(t[s],t[s-1],!0);)--s;return s}},function(e,n){function r(e,n,r){var t=e[n];e[n]=e[r],e[r]=t}function t(e,n){return Math.round(e+Math.random()*(n-e))}function o(e,n,i,s){if(i=0;n--)this.prepend(e[n]);else{if(!e[u]&&"string"!=typeof e)throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e);this.children.unshift(e)}return this},t.prototype.walk=function(e){for(var n,r=0,t=this.children.length;r0){for(n=[],r=0;r 0 && aGenerated.column >= 0\n\t && !aOriginal && !aSource && !aName) {\n\t // Case 1.\n\t return;\n\t }\n\t else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n\t && aOriginal && 'line' in aOriginal && 'column' in aOriginal\n\t && aGenerated.line > 0 && aGenerated.column >= 0\n\t && aOriginal.line > 0 && aOriginal.column >= 0\n\t && aSource) {\n\t // Cases 2 and 3.\n\t return;\n\t }\n\t else {\n\t throw new Error('Invalid mapping: ' + JSON.stringify({\n\t generated: aGenerated,\n\t source: aSource,\n\t original: aOriginal,\n\t name: aName\n\t }));\n\t }\n\t };\n\t\n\t/**\n\t * Serialize the accumulated mappings in to the stream of base 64 VLQs\n\t * specified by the source map format.\n\t */\n\tSourceMapGenerator.prototype._serializeMappings =\n\t function SourceMapGenerator_serializeMappings() {\n\t var previousGeneratedColumn = 0;\n\t var previousGeneratedLine = 1;\n\t var previousOriginalColumn = 0;\n\t var previousOriginalLine = 0;\n\t var previousName = 0;\n\t var previousSource = 0;\n\t var result = '';\n\t var next;\n\t var mapping;\n\t var nameIdx;\n\t var sourceIdx;\n\t\n\t var mappings = this._mappings.toArray();\n\t for (var i = 0, len = mappings.length; i < len; i++) {\n\t mapping = mappings[i];\n\t next = ''\n\t\n\t if (mapping.generatedLine !== previousGeneratedLine) {\n\t previousGeneratedColumn = 0;\n\t while (mapping.generatedLine !== previousGeneratedLine) {\n\t next += ';';\n\t previousGeneratedLine++;\n\t }\n\t }\n\t else {\n\t if (i > 0) {\n\t if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {\n\t continue;\n\t }\n\t next += ',';\n\t }\n\t }\n\t\n\t next += base64VLQ.encode(mapping.generatedColumn\n\t - previousGeneratedColumn);\n\t previousGeneratedColumn = mapping.generatedColumn;\n\t\n\t if (mapping.source != null) {\n\t sourceIdx = this._sources.indexOf(mapping.source);\n\t next += base64VLQ.encode(sourceIdx - previousSource);\n\t previousSource = sourceIdx;\n\t\n\t // lines are stored 0-based in SourceMap spec version 3\n\t next += base64VLQ.encode(mapping.originalLine - 1\n\t - previousOriginalLine);\n\t previousOriginalLine = mapping.originalLine - 1;\n\t\n\t next += base64VLQ.encode(mapping.originalColumn\n\t - previousOriginalColumn);\n\t previousOriginalColumn = mapping.originalColumn;\n\t\n\t if (mapping.name != null) {\n\t nameIdx = this._names.indexOf(mapping.name);\n\t next += base64VLQ.encode(nameIdx - previousName);\n\t previousName = nameIdx;\n\t }\n\t }\n\t\n\t result += next;\n\t }\n\t\n\t return result;\n\t };\n\t\n\tSourceMapGenerator.prototype._generateSourcesContent =\n\t function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {\n\t return aSources.map(function (source) {\n\t if (!this._sourcesContents) {\n\t return null;\n\t }\n\t if (aSourceRoot 
!= null) {\n\t source = util.relative(aSourceRoot, source);\n\t }\n\t var key = util.toSetString(source);\n\t return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)\n\t ? this._sourcesContents[key]\n\t : null;\n\t }, this);\n\t };\n\t\n\t/**\n\t * Externalize the source map.\n\t */\n\tSourceMapGenerator.prototype.toJSON =\n\t function SourceMapGenerator_toJSON() {\n\t var map = {\n\t version: this._version,\n\t sources: this._sources.toArray(),\n\t names: this._names.toArray(),\n\t mappings: this._serializeMappings()\n\t };\n\t if (this._file != null) {\n\t map.file = this._file;\n\t }\n\t if (this._sourceRoot != null) {\n\t map.sourceRoot = this._sourceRoot;\n\t }\n\t if (this._sourcesContents) {\n\t map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);\n\t }\n\t\n\t return map;\n\t };\n\t\n\t/**\n\t * Render the source map being generated to a string.\n\t */\n\tSourceMapGenerator.prototype.toString =\n\t function SourceMapGenerator_toString() {\n\t return JSON.stringify(this.toJSON());\n\t };\n\t\n\texports.SourceMapGenerator = SourceMapGenerator;\n\n\n/***/ }),\n/* 2 */\n/***/ (function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t *\n\t * Based on the Base 64 VLQ implementation in Closure Compiler:\n\t * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java\n\t *\n\t * Copyright 2011 The Closure Compiler Authors. All rights reserved.\n\t * Redistribution and use in source and binary forms, with or without\n\t * modification, are permitted provided that the following conditions are\n\t * met:\n\t *\n\t * * Redistributions of source code must retain the above copyright\n\t * notice, this list of conditions and the following disclaimer.\n\t * * Redistributions in binary form must reproduce the above\n\t * copyright notice, this list of conditions and the following\n\t * disclaimer in the documentation and/or other materials provided\n\t * with the distribution.\n\t * * Neither the name of Google Inc. nor the names of its\n\t * contributors may be used to endorse or promote products derived\n\t * from this software without specific prior written permission.\n\t *\n\t * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\t * \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n\t * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n\t * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n\t * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n\t * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n\t * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n\t * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n\t * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n\t * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\t * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\t */\n\t\n\tvar base64 = __webpack_require__(3);\n\t\n\t// A single base 64 digit can contain 6 bits of data. 
For the base 64 variable\n\t// length quantities we use in the source map spec, the first bit is the sign,\n\t// the next four bits are the actual value, and the 6th bit is the\n\t// continuation bit. The continuation bit tells us whether there are more\n\t// digits in this value following this digit.\n\t//\n\t// Continuation\n\t// | Sign\n\t// | |\n\t// V V\n\t// 101011\n\t\n\tvar VLQ_BASE_SHIFT = 5;\n\t\n\t// binary: 100000\n\tvar VLQ_BASE = 1 << VLQ_BASE_SHIFT;\n\t\n\t// binary: 011111\n\tvar VLQ_BASE_MASK = VLQ_BASE - 1;\n\t\n\t// binary: 100000\n\tvar VLQ_CONTINUATION_BIT = VLQ_BASE;\n\t\n\t/**\n\t * Converts from a two-complement value to a value where the sign bit is\n\t * placed in the least significant bit. For example, as decimals:\n\t * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)\n\t * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)\n\t */\n\tfunction toVLQSigned(aValue) {\n\t return aValue < 0\n\t ? ((-aValue) << 1) + 1\n\t : (aValue << 1) + 0;\n\t}\n\t\n\t/**\n\t * Converts to a two-complement value from a value where the sign bit is\n\t * placed in the least significant bit. For example, as decimals:\n\t * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1\n\t * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2\n\t */\n\tfunction fromVLQSigned(aValue) {\n\t var isNegative = (aValue & 1) === 1;\n\t var shifted = aValue >> 1;\n\t return isNegative\n\t ? -shifted\n\t : shifted;\n\t}\n\t\n\t/**\n\t * Returns the base 64 VLQ encoded value.\n\t */\n\texports.encode = function base64VLQ_encode(aValue) {\n\t var encoded = \"\";\n\t var digit;\n\t\n\t var vlq = toVLQSigned(aValue);\n\t\n\t do {\n\t digit = vlq & VLQ_BASE_MASK;\n\t vlq >>>= VLQ_BASE_SHIFT;\n\t if (vlq > 0) {\n\t // There are still more digits in this value, so we must make sure the\n\t // continuation bit is marked.\n\t digit |= VLQ_CONTINUATION_BIT;\n\t }\n\t encoded += base64.encode(digit);\n\t } while (vlq > 0);\n\t\n\t return encoded;\n\t};\n\t\n\t/**\n\t * Decodes the next base 64 VLQ value from the given string and returns the\n\t * value and the rest of the string via the out parameter.\n\t */\n\texports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {\n\t var strLen = aStr.length;\n\t var result = 0;\n\t var shift = 0;\n\t var continuation, digit;\n\t\n\t do {\n\t if (aIndex >= strLen) {\n\t throw new Error(\"Expected more digits in base 64 VLQ value.\");\n\t }\n\t\n\t digit = base64.decode(aStr.charCodeAt(aIndex++));\n\t if (digit === -1) {\n\t throw new Error(\"Invalid base64 digit: \" + aStr.charAt(aIndex - 1));\n\t }\n\t\n\t continuation = !!(digit & VLQ_CONTINUATION_BIT);\n\t digit &= VLQ_BASE_MASK;\n\t result = result + (digit << shift);\n\t shift += VLQ_BASE_SHIFT;\n\t } while (continuation);\n\t\n\t aOutParam.value = fromVLQSigned(result);\n\t aOutParam.rest = aIndex;\n\t};\n\n\n/***/ }),\n/* 3 */\n/***/ (function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. 
See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');\n\t\n\t/**\n\t * Encode an integer in the range of 0 to 63 to a single base 64 digit.\n\t */\n\texports.encode = function (number) {\n\t if (0 <= number && number < intToCharMap.length) {\n\t return intToCharMap[number];\n\t }\n\t throw new TypeError(\"Must be between 0 and 63: \" + number);\n\t};\n\t\n\t/**\n\t * Decode a single base 64 character code digit to an integer. Returns -1 on\n\t * failure.\n\t */\n\texports.decode = function (charCode) {\n\t var bigA = 65; // 'A'\n\t var bigZ = 90; // 'Z'\n\t\n\t var littleA = 97; // 'a'\n\t var littleZ = 122; // 'z'\n\t\n\t var zero = 48; // '0'\n\t var nine = 57; // '9'\n\t\n\t var plus = 43; // '+'\n\t var slash = 47; // '/'\n\t\n\t var littleOffset = 26;\n\t var numberOffset = 52;\n\t\n\t // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ\n\t if (bigA <= charCode && charCode <= bigZ) {\n\t return (charCode - bigA);\n\t }\n\t\n\t // 26 - 51: abcdefghijklmnopqrstuvwxyz\n\t if (littleA <= charCode && charCode <= littleZ) {\n\t return (charCode - littleA + littleOffset);\n\t }\n\t\n\t // 52 - 61: 0123456789\n\t if (zero <= charCode && charCode <= nine) {\n\t return (charCode - zero + numberOffset);\n\t }\n\t\n\t // 62: +\n\t if (charCode == plus) {\n\t return 62;\n\t }\n\t\n\t // 63: /\n\t if (charCode == slash) {\n\t return 63;\n\t }\n\t\n\t // Invalid base64 digit.\n\t return -1;\n\t};\n\n\n/***/ }),\n/* 4 */\n/***/ (function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\t/**\n\t * This is a helper function for getting values from parameter/options\n\t * objects.\n\t *\n\t * @param args The object we are extracting values from\n\t * @param name The name of the property we are getting.\n\t * @param defaultValue An optional value to return if the property is missing\n\t * from the object. If this is not specified and the property is missing, an\n\t * error will be thrown.\n\t */\n\tfunction getArg(aArgs, aName, aDefaultValue) {\n\t if (aName in aArgs) {\n\t return aArgs[aName];\n\t } else if (arguments.length === 3) {\n\t return aDefaultValue;\n\t } else {\n\t throw new Error('\"' + aName + '\" is a required argument.');\n\t }\n\t}\n\texports.getArg = getArg;\n\t\n\tvar urlRegexp = /^(?:([\\w+\\-.]+):)?\\/\\/(?:(\\w+:\\w+)@)?([\\w.]*)(?::(\\d+))?(\\S*)$/;\n\tvar dataUrlRegexp = /^data:.+\\,.+$/;\n\t\n\tfunction urlParse(aUrl) {\n\t var match = aUrl.match(urlRegexp);\n\t if (!match) {\n\t return null;\n\t }\n\t return {\n\t scheme: match[1],\n\t auth: match[2],\n\t host: match[3],\n\t port: match[4],\n\t path: match[5]\n\t };\n\t}\n\texports.urlParse = urlParse;\n\t\n\tfunction urlGenerate(aParsedUrl) {\n\t var url = '';\n\t if (aParsedUrl.scheme) {\n\t url += aParsedUrl.scheme + ':';\n\t }\n\t url += '//';\n\t if (aParsedUrl.auth) {\n\t url += aParsedUrl.auth + '@';\n\t }\n\t if (aParsedUrl.host) {\n\t url += aParsedUrl.host;\n\t }\n\t if (aParsedUrl.port) {\n\t url += \":\" + aParsedUrl.port\n\t }\n\t if (aParsedUrl.path) {\n\t url += aParsedUrl.path;\n\t }\n\t return url;\n\t}\n\texports.urlGenerate = urlGenerate;\n\t\n\t/**\n\t * Normalizes a path, or the path portion of a URL:\n\t *\n\t * - Replaces consecutive slashes with one slash.\n\t * - Removes unnecessary '.' 
parts.\n\t * - Removes unnecessary '/..' parts.\n\t *\n\t * Based on code in the Node.js 'path' core module.\n\t *\n\t * @param aPath The path or url to normalize.\n\t */\n\tfunction normalize(aPath) {\n\t var path = aPath;\n\t var url = urlParse(aPath);\n\t if (url) {\n\t if (!url.path) {\n\t return aPath;\n\t }\n\t path = url.path;\n\t }\n\t var isAbsolute = exports.isAbsolute(path);\n\t\n\t var parts = path.split(/\\/+/);\n\t for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {\n\t part = parts[i];\n\t if (part === '.') {\n\t parts.splice(i, 1);\n\t } else if (part === '..') {\n\t up++;\n\t } else if (up > 0) {\n\t if (part === '') {\n\t // The first part is blank if the path is absolute. Trying to go\n\t // above the root is a no-op. Therefore we can remove all '..' parts\n\t // directly after the root.\n\t parts.splice(i + 1, up);\n\t up = 0;\n\t } else {\n\t parts.splice(i, 2);\n\t up--;\n\t }\n\t }\n\t }\n\t path = parts.join('/');\n\t\n\t if (path === '') {\n\t path = isAbsolute ? '/' : '.';\n\t }\n\t\n\t if (url) {\n\t url.path = path;\n\t return urlGenerate(url);\n\t }\n\t return path;\n\t}\n\texports.normalize = normalize;\n\t\n\t/**\n\t * Joins two paths/URLs.\n\t *\n\t * @param aRoot The root path or URL.\n\t * @param aPath The path or URL to be joined with the root.\n\t *\n\t * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a\n\t * scheme-relative URL: Then the scheme of aRoot, if any, is prepended\n\t * first.\n\t * - Otherwise aPath is a path. If aRoot is a URL, then its path portion\n\t * is updated with the result and aRoot is returned. Otherwise the result\n\t * is returned.\n\t * - If aPath is absolute, the result is aPath.\n\t * - Otherwise the two paths are joined with a slash.\n\t * - Joining for example 'http://' and 'www.example.com' is also supported.\n\t */\n\tfunction join(aRoot, aPath) {\n\t if (aRoot === \"\") {\n\t aRoot = \".\";\n\t }\n\t if (aPath === \"\") {\n\t aPath = \".\";\n\t }\n\t var aPathUrl = urlParse(aPath);\n\t var aRootUrl = urlParse(aRoot);\n\t if (aRootUrl) {\n\t aRoot = aRootUrl.path || '/';\n\t }\n\t\n\t // `join(foo, '//www.example.org')`\n\t if (aPathUrl && !aPathUrl.scheme) {\n\t if (aRootUrl) {\n\t aPathUrl.scheme = aRootUrl.scheme;\n\t }\n\t return urlGenerate(aPathUrl);\n\t }\n\t\n\t if (aPathUrl || aPath.match(dataUrlRegexp)) {\n\t return aPath;\n\t }\n\t\n\t // `join('http://', 'www.example.com')`\n\t if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {\n\t aRootUrl.host = aPath;\n\t return urlGenerate(aRootUrl);\n\t }\n\t\n\t var joined = aPath.charAt(0) === '/'\n\t ? aPath\n\t : normalize(aRoot.replace(/\\/+$/, '') + '/' + aPath);\n\t\n\t if (aRootUrl) {\n\t aRootUrl.path = joined;\n\t return urlGenerate(aRootUrl);\n\t }\n\t return joined;\n\t}\n\texports.join = join;\n\t\n\texports.isAbsolute = function (aPath) {\n\t return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp);\n\t};\n\t\n\t/**\n\t * Make a path relative to a URL or another path.\n\t *\n\t * @param aRoot The root path or URL.\n\t * @param aPath The path or URL to be made relative to aRoot.\n\t */\n\tfunction relative(aRoot, aPath) {\n\t if (aRoot === \"\") {\n\t aRoot = \".\";\n\t }\n\t\n\t aRoot = aRoot.replace(/\\/$/, '');\n\t\n\t // It is possible for the path to be above the root. In this case, simply\n\t // checking whether the root is a prefix of the path won't work. 
Instead, we\n\t // need to remove components from the root one by one, until either we find\n\t // a prefix that fits, or we run out of components to remove.\n\t var level = 0;\n\t while (aPath.indexOf(aRoot + '/') !== 0) {\n\t var index = aRoot.lastIndexOf(\"/\");\n\t if (index < 0) {\n\t return aPath;\n\t }\n\t\n\t // If the only part of the root that is left is the scheme (i.e. http://,\n\t // file:///, etc.), one or more slashes (/), or simply nothing at all, we\n\t // have exhausted all components, so the path is not relative to the root.\n\t aRoot = aRoot.slice(0, index);\n\t if (aRoot.match(/^([^\\/]+:\\/)?\\/*$/)) {\n\t return aPath;\n\t }\n\t\n\t ++level;\n\t }\n\t\n\t // Make sure we add a \"../\" for each component we removed from the root.\n\t return Array(level + 1).join(\"../\") + aPath.substr(aRoot.length + 1);\n\t}\n\texports.relative = relative;\n\t\n\tvar supportsNullProto = (function () {\n\t var obj = Object.create(null);\n\t return !('__proto__' in obj);\n\t}());\n\t\n\tfunction identity (s) {\n\t return s;\n\t}\n\t\n\t/**\n\t * Because behavior goes wacky when you set `__proto__` on objects, we\n\t * have to prefix all the strings in our set with an arbitrary character.\n\t *\n\t * See https://github.com/mozilla/source-map/pull/31 and\n\t * https://github.com/mozilla/source-map/issues/30\n\t *\n\t * @param String aStr\n\t */\n\tfunction toSetString(aStr) {\n\t if (isProtoString(aStr)) {\n\t return '$' + aStr;\n\t }\n\t\n\t return aStr;\n\t}\n\texports.toSetString = supportsNullProto ? identity : toSetString;\n\t\n\tfunction fromSetString(aStr) {\n\t if (isProtoString(aStr)) {\n\t return aStr.slice(1);\n\t }\n\t\n\t return aStr;\n\t}\n\texports.fromSetString = supportsNullProto ? identity : fromSetString;\n\t\n\tfunction isProtoString(s) {\n\t if (!s) {\n\t return false;\n\t }\n\t\n\t var length = s.length;\n\t\n\t if (length < 9 /* \"__proto__\".length */) {\n\t return false;\n\t }\n\t\n\t if (s.charCodeAt(length - 1) !== 95 /* '_' */ ||\n\t s.charCodeAt(length - 2) !== 95 /* '_' */ ||\n\t s.charCodeAt(length - 3) !== 111 /* 'o' */ ||\n\t s.charCodeAt(length - 4) !== 116 /* 't' */ ||\n\t s.charCodeAt(length - 5) !== 111 /* 'o' */ ||\n\t s.charCodeAt(length - 6) !== 114 /* 'r' */ ||\n\t s.charCodeAt(length - 7) !== 112 /* 'p' */ ||\n\t s.charCodeAt(length - 8) !== 95 /* '_' */ ||\n\t s.charCodeAt(length - 9) !== 95 /* '_' */) {\n\t return false;\n\t }\n\t\n\t for (var i = length - 10; i >= 0; i--) {\n\t if (s.charCodeAt(i) !== 36 /* '$' */) {\n\t return false;\n\t }\n\t }\n\t\n\t return true;\n\t}\n\t\n\t/**\n\t * Comparator between two mappings where the original positions are compared.\n\t *\n\t * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n\t * mappings with the same original source/line/column, but different generated\n\t * line and column the same. 
Useful when searching for a mapping with a\n\t * stubbed out mapping.\n\t */\n\tfunction compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {\n\t var cmp = mappingA.source - mappingB.source;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalLine - mappingB.originalLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalColumn - mappingB.originalColumn;\n\t if (cmp !== 0 || onlyCompareOriginal) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedLine - mappingB.generatedLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t return mappingA.name - mappingB.name;\n\t}\n\texports.compareByOriginalPositions = compareByOriginalPositions;\n\t\n\t/**\n\t * Comparator between two mappings with deflated source and name indices where\n\t * the generated positions are compared.\n\t *\n\t * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n\t * mappings with the same generated line and column, but different\n\t * source/name/original line and column the same. Useful when searching for a\n\t * mapping with a stubbed out mapping.\n\t */\n\tfunction compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {\n\t var cmp = mappingA.generatedLine - mappingB.generatedLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n\t if (cmp !== 0 || onlyCompareGenerated) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.source - mappingB.source;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalLine - mappingB.originalLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalColumn - mappingB.originalColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t return mappingA.name - mappingB.name;\n\t}\n\texports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;\n\t\n\tfunction strcmp(aStr1, aStr2) {\n\t if (aStr1 === aStr2) {\n\t return 0;\n\t }\n\t\n\t if (aStr1 > aStr2) {\n\t return 1;\n\t }\n\t\n\t return -1;\n\t}\n\t\n\t/**\n\t * Comparator between two mappings with inflated source and name strings where\n\t * the generated positions are compared.\n\t */\n\tfunction compareByGeneratedPositionsInflated(mappingA, mappingB) {\n\t var cmp = mappingA.generatedLine - mappingB.generatedLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = strcmp(mappingA.source, mappingB.source);\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalLine - mappingB.originalLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalColumn - mappingB.originalColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t return strcmp(mappingA.name, mappingB.name);\n\t}\n\texports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;\n\n\n/***/ }),\n/* 5 */\n/***/ (function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. 
See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar util = __webpack_require__(4);\n\tvar has = Object.prototype.hasOwnProperty;\n\tvar hasNativeMap = typeof Map !== \"undefined\";\n\t\n\t/**\n\t * A data structure which is a combination of an array and a set. Adding a new\n\t * member is O(1), testing for membership is O(1), and finding the index of an\n\t * element is O(1). Removing elements from the set is not supported. Only\n\t * strings are supported for membership.\n\t */\n\tfunction ArraySet() {\n\t this._array = [];\n\t this._set = hasNativeMap ? new Map() : Object.create(null);\n\t}\n\t\n\t/**\n\t * Static method for creating ArraySet instances from an existing array.\n\t */\n\tArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {\n\t var set = new ArraySet();\n\t for (var i = 0, len = aArray.length; i < len; i++) {\n\t set.add(aArray[i], aAllowDuplicates);\n\t }\n\t return set;\n\t};\n\t\n\t/**\n\t * Return how many unique items are in this ArraySet. If duplicates have been\n\t * added, than those do not count towards the size.\n\t *\n\t * @returns Number\n\t */\n\tArraySet.prototype.size = function ArraySet_size() {\n\t return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length;\n\t};\n\t\n\t/**\n\t * Add the given string to this set.\n\t *\n\t * @param String aStr\n\t */\n\tArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {\n\t var sStr = hasNativeMap ? aStr : util.toSetString(aStr);\n\t var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr);\n\t var idx = this._array.length;\n\t if (!isDuplicate || aAllowDuplicates) {\n\t this._array.push(aStr);\n\t }\n\t if (!isDuplicate) {\n\t if (hasNativeMap) {\n\t this._set.set(aStr, idx);\n\t } else {\n\t this._set[sStr] = idx;\n\t }\n\t }\n\t};\n\t\n\t/**\n\t * Is the given string a member of this set?\n\t *\n\t * @param String aStr\n\t */\n\tArraySet.prototype.has = function ArraySet_has(aStr) {\n\t if (hasNativeMap) {\n\t return this._set.has(aStr);\n\t } else {\n\t var sStr = util.toSetString(aStr);\n\t return has.call(this._set, sStr);\n\t }\n\t};\n\t\n\t/**\n\t * What is the index of the given string in the array?\n\t *\n\t * @param String aStr\n\t */\n\tArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {\n\t if (hasNativeMap) {\n\t var idx = this._set.get(aStr);\n\t if (idx >= 0) {\n\t return idx;\n\t }\n\t } else {\n\t var sStr = util.toSetString(aStr);\n\t if (has.call(this._set, sStr)) {\n\t return this._set[sStr];\n\t }\n\t }\n\t\n\t throw new Error('\"' + aStr + '\" is not in the set.');\n\t};\n\t\n\t/**\n\t * What is the element at the given index?\n\t *\n\t * @param Number aIdx\n\t */\n\tArraySet.prototype.at = function ArraySet_at(aIdx) {\n\t if (aIdx >= 0 && aIdx < this._array.length) {\n\t return this._array[aIdx];\n\t }\n\t throw new Error('No element indexed by ' + aIdx);\n\t};\n\t\n\t/**\n\t * Returns the array representation of this set (which has the proper indices\n\t * indicated by indexOf). 
Note that this is a copy of the internal array used\n\t * for storing the members so that no one can mess with internal state.\n\t */\n\tArraySet.prototype.toArray = function ArraySet_toArray() {\n\t return this._array.slice();\n\t};\n\t\n\texports.ArraySet = ArraySet;\n\n\n/***/ }),\n/* 6 */\n/***/ (function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2014 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar util = __webpack_require__(4);\n\t\n\t/**\n\t * Determine whether mappingB is after mappingA with respect to generated\n\t * position.\n\t */\n\tfunction generatedPositionAfter(mappingA, mappingB) {\n\t // Optimized for most common case\n\t var lineA = mappingA.generatedLine;\n\t var lineB = mappingB.generatedLine;\n\t var columnA = mappingA.generatedColumn;\n\t var columnB = mappingB.generatedColumn;\n\t return lineB > lineA || lineB == lineA && columnB >= columnA ||\n\t util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;\n\t}\n\t\n\t/**\n\t * A data structure to provide a sorted view of accumulated mappings in a\n\t * performance conscious manner. It trades a neglibable overhead in general\n\t * case for a large speedup in case of mappings being added in order.\n\t */\n\tfunction MappingList() {\n\t this._array = [];\n\t this._sorted = true;\n\t // Serves as infimum\n\t this._last = {generatedLine: -1, generatedColumn: 0};\n\t}\n\t\n\t/**\n\t * Iterate through internal items. This method takes the same arguments that\n\t * `Array.prototype.forEach` takes.\n\t *\n\t * NOTE: The order of the mappings is NOT guaranteed.\n\t */\n\tMappingList.prototype.unsortedForEach =\n\t function MappingList_forEach(aCallback, aThisArg) {\n\t this._array.forEach(aCallback, aThisArg);\n\t };\n\t\n\t/**\n\t * Add the given source mapping.\n\t *\n\t * @param Object aMapping\n\t */\n\tMappingList.prototype.add = function MappingList_add(aMapping) {\n\t if (generatedPositionAfter(this._last, aMapping)) {\n\t this._last = aMapping;\n\t this._array.push(aMapping);\n\t } else {\n\t this._sorted = false;\n\t this._array.push(aMapping);\n\t }\n\t};\n\t\n\t/**\n\t * Returns the flat, sorted array of mappings. The mappings are sorted by\n\t * generated position.\n\t *\n\t * WARNING: This method returns internal data without copying, for\n\t * performance. The return value must NOT be mutated, and should be treated as\n\t * an immutable borrow. If you want to take ownership, you must make your own\n\t * copy.\n\t */\n\tMappingList.prototype.toArray = function MappingList_toArray() {\n\t if (!this._sorted) {\n\t this._array.sort(util.compareByGeneratedPositionsInflated);\n\t this._sorted = true;\n\t }\n\t return this._array;\n\t};\n\t\n\texports.MappingList = MappingList;\n\n\n/***/ }),\n/* 7 */\n/***/ (function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. 
See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar util = __webpack_require__(4);\n\tvar binarySearch = __webpack_require__(8);\n\tvar ArraySet = __webpack_require__(5).ArraySet;\n\tvar base64VLQ = __webpack_require__(2);\n\tvar quickSort = __webpack_require__(9).quickSort;\n\t\n\tfunction SourceMapConsumer(aSourceMap) {\n\t var sourceMap = aSourceMap;\n\t if (typeof aSourceMap === 'string') {\n\t sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n\t }\n\t\n\t return sourceMap.sections != null\n\t ? new IndexedSourceMapConsumer(sourceMap)\n\t : new BasicSourceMapConsumer(sourceMap);\n\t}\n\t\n\tSourceMapConsumer.fromSourceMap = function(aSourceMap) {\n\t return BasicSourceMapConsumer.fromSourceMap(aSourceMap);\n\t}\n\t\n\t/**\n\t * The version of the source mapping spec that we are consuming.\n\t */\n\tSourceMapConsumer.prototype._version = 3;\n\t\n\t// `__generatedMappings` and `__originalMappings` are arrays that hold the\n\t// parsed mapping coordinates from the source map's \"mappings\" attribute. They\n\t// are lazily instantiated, accessed via the `_generatedMappings` and\n\t// `_originalMappings` getters respectively, and we only parse the mappings\n\t// and create these arrays once queried for a source location. We jump through\n\t// these hoops because there can be many thousands of mappings, and parsing\n\t// them is expensive, so we only want to do it if we must.\n\t//\n\t// Each object in the arrays is of the form:\n\t//\n\t// {\n\t// generatedLine: The line number in the generated code,\n\t// generatedColumn: The column number in the generated code,\n\t// source: The path to the original source file that generated this\n\t// chunk of code,\n\t// originalLine: The line number in the original source that\n\t// corresponds to this chunk of generated code,\n\t// originalColumn: The column number in the original source that\n\t// corresponds to this chunk of generated code,\n\t// name: The name of the original symbol which generated this chunk of\n\t// code.\n\t// }\n\t//\n\t// All properties except for `generatedLine` and `generatedColumn` can be\n\t// `null`.\n\t//\n\t// `_generatedMappings` is ordered by the generated positions.\n\t//\n\t// `_originalMappings` is ordered by the original positions.\n\t\n\tSourceMapConsumer.prototype.__generatedMappings = null;\n\tObject.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {\n\t get: function () {\n\t if (!this.__generatedMappings) {\n\t this._parseMappings(this._mappings, this.sourceRoot);\n\t }\n\t\n\t return this.__generatedMappings;\n\t }\n\t});\n\t\n\tSourceMapConsumer.prototype.__originalMappings = null;\n\tObject.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {\n\t get: function () {\n\t if (!this.__originalMappings) {\n\t this._parseMappings(this._mappings, this.sourceRoot);\n\t }\n\t\n\t return this.__originalMappings;\n\t }\n\t});\n\t\n\tSourceMapConsumer.prototype._charIsMappingSeparator =\n\t function SourceMapConsumer_charIsMappingSeparator(aStr, index) {\n\t var c = aStr.charAt(index);\n\t return c === \";\" || c === \",\";\n\t };\n\t\n\t/**\n\t * Parse the mappings in a string in to a data structure which we can easily\n\t * query (the ordered arrays in the `this.__generatedMappings` and\n\t * `this.__originalMappings` properties).\n\t */\n\tSourceMapConsumer.prototype._parseMappings =\n\t function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n\t throw new Error(\"Subclasses must implement _parseMappings\");\n\t 
};\n\t\n\tSourceMapConsumer.GENERATED_ORDER = 1;\n\tSourceMapConsumer.ORIGINAL_ORDER = 2;\n\t\n\tSourceMapConsumer.GREATEST_LOWER_BOUND = 1;\n\tSourceMapConsumer.LEAST_UPPER_BOUND = 2;\n\t\n\t/**\n\t * Iterate over each mapping between an original source/line/column and a\n\t * generated line/column in this source map.\n\t *\n\t * @param Function aCallback\n\t * The function that is called with each mapping.\n\t * @param Object aContext\n\t * Optional. If specified, this object will be the value of `this` every\n\t * time that `aCallback` is called.\n\t * @param aOrder\n\t * Either `SourceMapConsumer.GENERATED_ORDER` or\n\t * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to\n\t * iterate over the mappings sorted by the generated file's line/column\n\t * order or the original's source/line/column order, respectively. Defaults to\n\t * `SourceMapConsumer.GENERATED_ORDER`.\n\t */\n\tSourceMapConsumer.prototype.eachMapping =\n\t function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {\n\t var context = aContext || null;\n\t var order = aOrder || SourceMapConsumer.GENERATED_ORDER;\n\t\n\t var mappings;\n\t switch (order) {\n\t case SourceMapConsumer.GENERATED_ORDER:\n\t mappings = this._generatedMappings;\n\t break;\n\t case SourceMapConsumer.ORIGINAL_ORDER:\n\t mappings = this._originalMappings;\n\t break;\n\t default:\n\t throw new Error(\"Unknown order of iteration.\");\n\t }\n\t\n\t var sourceRoot = this.sourceRoot;\n\t mappings.map(function (mapping) {\n\t var source = mapping.source === null ? null : this._sources.at(mapping.source);\n\t if (source != null && sourceRoot != null) {\n\t source = util.join(sourceRoot, source);\n\t }\n\t return {\n\t source: source,\n\t generatedLine: mapping.generatedLine,\n\t generatedColumn: mapping.generatedColumn,\n\t originalLine: mapping.originalLine,\n\t originalColumn: mapping.originalColumn,\n\t name: mapping.name === null ? null : this._names.at(mapping.name)\n\t };\n\t }, this).forEach(aCallback, context);\n\t };\n\t\n\t/**\n\t * Returns all generated line and column information for the original source,\n\t * line, and column provided. If no column is provided, returns all mappings\n\t * corresponding to a either the line we are searching for or the next\n\t * closest line that has any mappings. Otherwise, returns all mappings\n\t * corresponding to the given line and either the column we are searching for\n\t * or the next closest column that has any offsets.\n\t *\n\t * The only argument is an object with the following properties:\n\t *\n\t * - source: The filename of the original source.\n\t * - line: The line number in the original source.\n\t * - column: Optional. the column number in the original source.\n\t *\n\t * and an array of objects is returned, each with the following properties:\n\t *\n\t * - line: The line number in the generated source, or null.\n\t * - column: The column number in the generated source, or null.\n\t */\n\tSourceMapConsumer.prototype.allGeneratedPositionsFor =\n\t function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {\n\t var line = util.getArg(aArgs, 'line');\n\t\n\t // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping\n\t // returns the index of the closest mapping less than the needle. 
By\n\t // setting needle.originalColumn to 0, we thus find the last mapping for\n\t // the given line, provided such a mapping exists.\n\t var needle = {\n\t source: util.getArg(aArgs, 'source'),\n\t originalLine: line,\n\t originalColumn: util.getArg(aArgs, 'column', 0)\n\t };\n\t\n\t if (this.sourceRoot != null) {\n\t needle.source = util.relative(this.sourceRoot, needle.source);\n\t }\n\t if (!this._sources.has(needle.source)) {\n\t return [];\n\t }\n\t needle.source = this._sources.indexOf(needle.source);\n\t\n\t var mappings = [];\n\t\n\t var index = this._findMapping(needle,\n\t this._originalMappings,\n\t \"originalLine\",\n\t \"originalColumn\",\n\t util.compareByOriginalPositions,\n\t binarySearch.LEAST_UPPER_BOUND);\n\t if (index >= 0) {\n\t var mapping = this._originalMappings[index];\n\t\n\t if (aArgs.column === undefined) {\n\t var originalLine = mapping.originalLine;\n\t\n\t // Iterate until either we run out of mappings, or we run into\n\t // a mapping for a different line than the one we found. Since\n\t // mappings are sorted, this is guaranteed to find all mappings for\n\t // the line we found.\n\t while (mapping && mapping.originalLine === originalLine) {\n\t mappings.push({\n\t line: util.getArg(mapping, 'generatedLine', null),\n\t column: util.getArg(mapping, 'generatedColumn', null),\n\t lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n\t });\n\t\n\t mapping = this._originalMappings[++index];\n\t }\n\t } else {\n\t var originalColumn = mapping.originalColumn;\n\t\n\t // Iterate until either we run out of mappings, or we run into\n\t // a mapping for a different line than the one we were searching for.\n\t // Since mappings are sorted, this is guaranteed to find all mappings for\n\t // the line we are searching for.\n\t while (mapping &&\n\t mapping.originalLine === line &&\n\t mapping.originalColumn == originalColumn) {\n\t mappings.push({\n\t line: util.getArg(mapping, 'generatedLine', null),\n\t column: util.getArg(mapping, 'generatedColumn', null),\n\t lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n\t });\n\t\n\t mapping = this._originalMappings[++index];\n\t }\n\t }\n\t }\n\t\n\t return mappings;\n\t };\n\t\n\texports.SourceMapConsumer = SourceMapConsumer;\n\t\n\t/**\n\t * A BasicSourceMapConsumer instance represents a parsed source map which we can\n\t * query for information about the original file positions by giving it a file\n\t * position in the generated source.\n\t *\n\t * The only parameter is the raw source map (either as a JSON string, or\n\t * already parsed to an object). According to the spec, source maps have the\n\t * following attributes:\n\t *\n\t * - version: Which version of the source map spec this map is following.\n\t * - sources: An array of URLs to the original source files.\n\t * - names: An array of identifiers which can be referrenced by individual mappings.\n\t * - sourceRoot: Optional. The URL root from which all sources are relative.\n\t * - sourcesContent: Optional. An array of contents of the original source files.\n\t * - mappings: A string of base64 VLQs which contain the actual mappings.\n\t * - file: Optional. 
The generated file this source map is associated with.\n\t *\n\t * Here is an example source map, taken from the source map spec[0]:\n\t *\n\t * {\n\t * version : 3,\n\t * file: \"out.js\",\n\t * sourceRoot : \"\",\n\t * sources: [\"foo.js\", \"bar.js\"],\n\t * names: [\"src\", \"maps\", \"are\", \"fun\"],\n\t * mappings: \"AA,AB;;ABCDE;\"\n\t * }\n\t *\n\t * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#\n\t */\n\tfunction BasicSourceMapConsumer(aSourceMap) {\n\t var sourceMap = aSourceMap;\n\t if (typeof aSourceMap === 'string') {\n\t sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n\t }\n\t\n\t var version = util.getArg(sourceMap, 'version');\n\t var sources = util.getArg(sourceMap, 'sources');\n\t // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which\n\t // requires the array) to play nice here.\n\t var names = util.getArg(sourceMap, 'names', []);\n\t var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);\n\t var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);\n\t var mappings = util.getArg(sourceMap, 'mappings');\n\t var file = util.getArg(sourceMap, 'file', null);\n\t\n\t // Once again, Sass deviates from the spec and supplies the version as a\n\t // string rather than a number, so we use loose equality checking here.\n\t if (version != this._version) {\n\t throw new Error('Unsupported version: ' + version);\n\t }\n\t\n\t sources = sources\n\t .map(String)\n\t // Some source maps produce relative source paths like \"./foo.js\" instead of\n\t // \"foo.js\". Normalize these first so that future comparisons will succeed.\n\t // See bugzil.la/1090768.\n\t .map(util.normalize)\n\t // Always ensure that absolute sources are internally stored relative to\n\t // the source root, if the source root is absolute. Not doing this would\n\t // be particularly problematic when the source root is a prefix of the\n\t // source (valid, but why??). See github issue #199 and bugzil.la/1188982.\n\t .map(function (source) {\n\t return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source)\n\t ? util.relative(sourceRoot, source)\n\t : source;\n\t });\n\t\n\t // Pass `true` below to allow duplicate names and sources. While source maps\n\t // are intended to be compressed and deduplicated, the TypeScript compiler\n\t // sometimes generates source maps with duplicates in them. 
See Github issue\n\t // #72 and bugzil.la/889492.\n\t this._names = ArraySet.fromArray(names.map(String), true);\n\t this._sources = ArraySet.fromArray(sources, true);\n\t\n\t this.sourceRoot = sourceRoot;\n\t this.sourcesContent = sourcesContent;\n\t this._mappings = mappings;\n\t this.file = file;\n\t}\n\t\n\tBasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\n\tBasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;\n\t\n\t/**\n\t * Create a BasicSourceMapConsumer from a SourceMapGenerator.\n\t *\n\t * @param SourceMapGenerator aSourceMap\n\t * The source map that will be consumed.\n\t * @returns BasicSourceMapConsumer\n\t */\n\tBasicSourceMapConsumer.fromSourceMap =\n\t function SourceMapConsumer_fromSourceMap(aSourceMap) {\n\t var smc = Object.create(BasicSourceMapConsumer.prototype);\n\t\n\t var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);\n\t var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);\n\t smc.sourceRoot = aSourceMap._sourceRoot;\n\t smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),\n\t smc.sourceRoot);\n\t smc.file = aSourceMap._file;\n\t\n\t // Because we are modifying the entries (by converting string sources and\n\t // names to indices into the sources and names ArraySets), we have to make\n\t // a copy of the entry or else bad things happen. Shared mutable state\n\t // strikes again! See github issue #191.\n\t\n\t var generatedMappings = aSourceMap._mappings.toArray().slice();\n\t var destGeneratedMappings = smc.__generatedMappings = [];\n\t var destOriginalMappings = smc.__originalMappings = [];\n\t\n\t for (var i = 0, length = generatedMappings.length; i < length; i++) {\n\t var srcMapping = generatedMappings[i];\n\t var destMapping = new Mapping;\n\t destMapping.generatedLine = srcMapping.generatedLine;\n\t destMapping.generatedColumn = srcMapping.generatedColumn;\n\t\n\t if (srcMapping.source) {\n\t destMapping.source = sources.indexOf(srcMapping.source);\n\t destMapping.originalLine = srcMapping.originalLine;\n\t destMapping.originalColumn = srcMapping.originalColumn;\n\t\n\t if (srcMapping.name) {\n\t destMapping.name = names.indexOf(srcMapping.name);\n\t }\n\t\n\t destOriginalMappings.push(destMapping);\n\t }\n\t\n\t destGeneratedMappings.push(destMapping);\n\t }\n\t\n\t quickSort(smc.__originalMappings, util.compareByOriginalPositions);\n\t\n\t return smc;\n\t };\n\t\n\t/**\n\t * The version of the source mapping spec that we are consuming.\n\t */\n\tBasicSourceMapConsumer.prototype._version = 3;\n\t\n\t/**\n\t * The list of original sources.\n\t */\n\tObject.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {\n\t get: function () {\n\t return this._sources.toArray().map(function (s) {\n\t return this.sourceRoot != null ? 
util.join(this.sourceRoot, s) : s;\n\t }, this);\n\t }\n\t});\n\t\n\t/**\n\t * Provide the JIT with a nice shape / hidden class.\n\t */\n\tfunction Mapping() {\n\t this.generatedLine = 0;\n\t this.generatedColumn = 0;\n\t this.source = null;\n\t this.originalLine = null;\n\t this.originalColumn = null;\n\t this.name = null;\n\t}\n\t\n\t/**\n\t * Parse the mappings in a string in to a data structure which we can easily\n\t * query (the ordered arrays in the `this.__generatedMappings` and\n\t * `this.__originalMappings` properties).\n\t */\n\tBasicSourceMapConsumer.prototype._parseMappings =\n\t function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n\t var generatedLine = 1;\n\t var previousGeneratedColumn = 0;\n\t var previousOriginalLine = 0;\n\t var previousOriginalColumn = 0;\n\t var previousSource = 0;\n\t var previousName = 0;\n\t var length = aStr.length;\n\t var index = 0;\n\t var cachedSegments = {};\n\t var temp = {};\n\t var originalMappings = [];\n\t var generatedMappings = [];\n\t var mapping, str, segment, end, value;\n\t\n\t while (index < length) {\n\t if (aStr.charAt(index) === ';') {\n\t generatedLine++;\n\t index++;\n\t previousGeneratedColumn = 0;\n\t }\n\t else if (aStr.charAt(index) === ',') {\n\t index++;\n\t }\n\t else {\n\t mapping = new Mapping();\n\t mapping.generatedLine = generatedLine;\n\t\n\t // Because each offset is encoded relative to the previous one,\n\t // many segments often have the same encoding. We can exploit this\n\t // fact by caching the parsed variable length fields of each segment,\n\t // allowing us to avoid a second parse if we encounter the same\n\t // segment again.\n\t for (end = index; end < length; end++) {\n\t if (this._charIsMappingSeparator(aStr, end)) {\n\t break;\n\t }\n\t }\n\t str = aStr.slice(index, end);\n\t\n\t segment = cachedSegments[str];\n\t if (segment) {\n\t index += str.length;\n\t } else {\n\t segment = [];\n\t while (index < end) {\n\t base64VLQ.decode(aStr, index, temp);\n\t value = temp.value;\n\t index = temp.rest;\n\t segment.push(value);\n\t }\n\t\n\t if (segment.length === 2) {\n\t throw new Error('Found a source, but no line and column');\n\t }\n\t\n\t if (segment.length === 3) {\n\t throw new Error('Found a source and line, but no column');\n\t }\n\t\n\t cachedSegments[str] = segment;\n\t }\n\t\n\t // Generated column.\n\t mapping.generatedColumn = previousGeneratedColumn + segment[0];\n\t previousGeneratedColumn = mapping.generatedColumn;\n\t\n\t if (segment.length > 1) {\n\t // Original source.\n\t mapping.source = previousSource + segment[1];\n\t previousSource += segment[1];\n\t\n\t // Original line.\n\t mapping.originalLine = previousOriginalLine + segment[2];\n\t previousOriginalLine = mapping.originalLine;\n\t // Lines are stored 0-based\n\t mapping.originalLine += 1;\n\t\n\t // Original column.\n\t mapping.originalColumn = previousOriginalColumn + segment[3];\n\t previousOriginalColumn = mapping.originalColumn;\n\t\n\t if (segment.length > 4) {\n\t // Original name.\n\t mapping.name = previousName + segment[4];\n\t previousName += segment[4];\n\t }\n\t }\n\t\n\t generatedMappings.push(mapping);\n\t if (typeof mapping.originalLine === 'number') {\n\t originalMappings.push(mapping);\n\t }\n\t }\n\t }\n\t\n\t quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated);\n\t this.__generatedMappings = generatedMappings;\n\t\n\t quickSort(originalMappings, util.compareByOriginalPositions);\n\t this.__originalMappings = originalMappings;\n\t };\n\t\n\t/**\n\t * Find the mapping that best 
matches the hypothetical \"needle\" mapping that\n\t * we are searching for in the given \"haystack\" of mappings.\n\t */\n\tBasicSourceMapConsumer.prototype._findMapping =\n\t function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,\n\t aColumnName, aComparator, aBias) {\n\t // To return the position we are searching for, we must first find the\n\t // mapping for the given position and then return the opposite position it\n\t // points to. Because the mappings are sorted, we can use binary search to\n\t // find the best mapping.\n\t\n\t if (aNeedle[aLineName] <= 0) {\n\t throw new TypeError('Line must be greater than or equal to 1, got '\n\t + aNeedle[aLineName]);\n\t }\n\t if (aNeedle[aColumnName] < 0) {\n\t throw new TypeError('Column must be greater than or equal to 0, got '\n\t + aNeedle[aColumnName]);\n\t }\n\t\n\t return binarySearch.search(aNeedle, aMappings, aComparator, aBias);\n\t };\n\t\n\t/**\n\t * Compute the last column for each generated mapping. The last column is\n\t * inclusive.\n\t */\n\tBasicSourceMapConsumer.prototype.computeColumnSpans =\n\t function SourceMapConsumer_computeColumnSpans() {\n\t for (var index = 0; index < this._generatedMappings.length; ++index) {\n\t var mapping = this._generatedMappings[index];\n\t\n\t // Mappings do not contain a field for the last generated columnt. We\n\t // can come up with an optimistic estimate, however, by assuming that\n\t // mappings are contiguous (i.e. given two consecutive mappings, the\n\t // first mapping ends where the second one starts).\n\t if (index + 1 < this._generatedMappings.length) {\n\t var nextMapping = this._generatedMappings[index + 1];\n\t\n\t if (mapping.generatedLine === nextMapping.generatedLine) {\n\t mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;\n\t continue;\n\t }\n\t }\n\t\n\t // The last mapping for each line spans the entire line.\n\t mapping.lastGeneratedColumn = Infinity;\n\t }\n\t };\n\t\n\t/**\n\t * Returns the original source, line, and column information for the generated\n\t * source's line and column positions provided. The only argument is an object\n\t * with the following properties:\n\t *\n\t * - line: The line number in the generated source.\n\t * - column: The column number in the generated source.\n\t * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n\t * 'SourceMapConsumer.LEAST_UPPER_BOUND'. 
Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - source: The original source file, or null.\n\t * - line: The line number in the original source, or null.\n\t * - column: The column number in the original source, or null.\n\t * - name: The original identifier, or null.\n\t */\n\tBasicSourceMapConsumer.prototype.originalPositionFor =\n\t function SourceMapConsumer_originalPositionFor(aArgs) {\n\t var needle = {\n\t generatedLine: util.getArg(aArgs, 'line'),\n\t generatedColumn: util.getArg(aArgs, 'column')\n\t };\n\t\n\t var index = this._findMapping(\n\t needle,\n\t this._generatedMappings,\n\t \"generatedLine\",\n\t \"generatedColumn\",\n\t util.compareByGeneratedPositionsDeflated,\n\t util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n\t );\n\t\n\t if (index >= 0) {\n\t var mapping = this._generatedMappings[index];\n\t\n\t if (mapping.generatedLine === needle.generatedLine) {\n\t var source = util.getArg(mapping, 'source', null);\n\t if (source !== null) {\n\t source = this._sources.at(source);\n\t if (this.sourceRoot != null) {\n\t source = util.join(this.sourceRoot, source);\n\t }\n\t }\n\t var name = util.getArg(mapping, 'name', null);\n\t if (name !== null) {\n\t name = this._names.at(name);\n\t }\n\t return {\n\t source: source,\n\t line: util.getArg(mapping, 'originalLine', null),\n\t column: util.getArg(mapping, 'originalColumn', null),\n\t name: name\n\t };\n\t }\n\t }\n\t\n\t return {\n\t source: null,\n\t line: null,\n\t column: null,\n\t name: null\n\t };\n\t };\n\t\n\t/**\n\t * Return true if we have the source content for every source in the source\n\t * map, false otherwise.\n\t */\n\tBasicSourceMapConsumer.prototype.hasContentsOfAllSources =\n\t function BasicSourceMapConsumer_hasContentsOfAllSources() {\n\t if (!this.sourcesContent) {\n\t return false;\n\t }\n\t return this.sourcesContent.length >= this._sources.size() &&\n\t !this.sourcesContent.some(function (sc) { return sc == null; });\n\t };\n\t\n\t/**\n\t * Returns the original source content. The only argument is the url of the\n\t * original source file. Returns null if no original source content is\n\t * available.\n\t */\n\tBasicSourceMapConsumer.prototype.sourceContentFor =\n\t function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n\t if (!this.sourcesContent) {\n\t return null;\n\t }\n\t\n\t if (this.sourceRoot != null) {\n\t aSource = util.relative(this.sourceRoot, aSource);\n\t }\n\t\n\t if (this._sources.has(aSource)) {\n\t return this.sourcesContent[this._sources.indexOf(aSource)];\n\t }\n\t\n\t var url;\n\t if (this.sourceRoot != null\n\t && (url = util.urlParse(this.sourceRoot))) {\n\t // XXX: file:// URIs and absolute paths lead to unexpected behavior for\n\t // many users. We can help them out when they expect file:// URIs to\n\t // behave like it would if they were running a local HTTP server. 
See\n\t // https://bugzilla.mozilla.org/show_bug.cgi?id=885597.\n\t var fileUriAbsPath = aSource.replace(/^file:\\/\\//, \"\");\n\t if (url.scheme == \"file\"\n\t && this._sources.has(fileUriAbsPath)) {\n\t return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]\n\t }\n\t\n\t if ((!url.path || url.path == \"/\")\n\t && this._sources.has(\"/\" + aSource)) {\n\t return this.sourcesContent[this._sources.indexOf(\"/\" + aSource)];\n\t }\n\t }\n\t\n\t // This function is used recursively from\n\t // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we\n\t // don't want to throw if we can't find the source - we just want to\n\t // return null, so we provide a flag to exit gracefully.\n\t if (nullOnMissing) {\n\t return null;\n\t }\n\t else {\n\t throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n\t }\n\t };\n\t\n\t/**\n\t * Returns the generated line and column information for the original source,\n\t * line, and column positions provided. The only argument is an object with\n\t * the following properties:\n\t *\n\t * - source: The filename of the original source.\n\t * - line: The line number in the original source.\n\t * - column: The column number in the original source.\n\t * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n\t * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - line: The line number in the generated source, or null.\n\t * - column: The column number in the generated source, or null.\n\t */\n\tBasicSourceMapConsumer.prototype.generatedPositionFor =\n\t function SourceMapConsumer_generatedPositionFor(aArgs) {\n\t var source = util.getArg(aArgs, 'source');\n\t if (this.sourceRoot != null) {\n\t source = util.relative(this.sourceRoot, source);\n\t }\n\t if (!this._sources.has(source)) {\n\t return {\n\t line: null,\n\t column: null,\n\t lastColumn: null\n\t };\n\t }\n\t source = this._sources.indexOf(source);\n\t\n\t var needle = {\n\t source: source,\n\t originalLine: util.getArg(aArgs, 'line'),\n\t originalColumn: util.getArg(aArgs, 'column')\n\t };\n\t\n\t var index = this._findMapping(\n\t needle,\n\t this._originalMappings,\n\t \"originalLine\",\n\t \"originalColumn\",\n\t util.compareByOriginalPositions,\n\t util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n\t );\n\t\n\t if (index >= 0) {\n\t var mapping = this._originalMappings[index];\n\t\n\t if (mapping.source === needle.source) {\n\t return {\n\t line: util.getArg(mapping, 'generatedLine', null),\n\t column: util.getArg(mapping, 'generatedColumn', null),\n\t lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n\t };\n\t }\n\t }\n\t\n\t return {\n\t line: null,\n\t column: null,\n\t lastColumn: null\n\t };\n\t };\n\t\n\texports.BasicSourceMapConsumer = BasicSourceMapConsumer;\n\t\n\t/**\n\t * An IndexedSourceMapConsumer instance represents a parsed source map which\n\t * we can query for information. It differs from BasicSourceMapConsumer in\n\t * that it takes \"indexed\" source maps (i.e. ones with a \"sections\" field) as\n\t * input.\n\t *\n\t * The only parameter is a raw source map (either as a JSON string, or already\n\t * parsed to an object). 
According to the spec for indexed source maps, they\n\t * have the following attributes:\n\t *\n\t * - version: Which version of the source map spec this map is following.\n\t * - file: Optional. The generated file this source map is associated with.\n\t * - sections: A list of section definitions.\n\t *\n\t * Each value under the \"sections\" field has two fields:\n\t * - offset: The offset into the original specified at which this section\n\t * begins to apply, defined as an object with a \"line\" and \"column\"\n\t * field.\n\t * - map: A source map definition. This source map could also be indexed,\n\t * but doesn't have to be.\n\t *\n\t * Instead of the \"map\" field, it's also possible to have a \"url\" field\n\t * specifying a URL to retrieve a source map from, but that's currently\n\t * unsupported.\n\t *\n\t * Here's an example source map, taken from the source map spec[0], but\n\t * modified to omit a section which uses the \"url\" field.\n\t *\n\t * {\n\t * version : 3,\n\t * file: \"app.js\",\n\t * sections: [{\n\t * offset: {line:100, column:10},\n\t * map: {\n\t * version : 3,\n\t * file: \"section.js\",\n\t * sources: [\"foo.js\", \"bar.js\"],\n\t * names: [\"src\", \"maps\", \"are\", \"fun\"],\n\t * mappings: \"AAAA,E;;ABCDE;\"\n\t * }\n\t * }],\n\t * }\n\t *\n\t * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt\n\t */\n\tfunction IndexedSourceMapConsumer(aSourceMap) {\n\t var sourceMap = aSourceMap;\n\t if (typeof aSourceMap === 'string') {\n\t sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n\t }\n\t\n\t var version = util.getArg(sourceMap, 'version');\n\t var sections = util.getArg(sourceMap, 'sections');\n\t\n\t if (version != this._version) {\n\t throw new Error('Unsupported version: ' + version);\n\t }\n\t\n\t this._sources = new ArraySet();\n\t this._names = new ArraySet();\n\t\n\t var lastOffset = {\n\t line: -1,\n\t column: 0\n\t };\n\t this._sections = sections.map(function (s) {\n\t if (s.url) {\n\t // The url field will require support for asynchronicity.\n\t // See https://github.com/mozilla/source-map/issues/16\n\t throw new Error('Support for url field in sections not implemented.');\n\t }\n\t var offset = util.getArg(s, 'offset');\n\t var offsetLine = util.getArg(offset, 'line');\n\t var offsetColumn = util.getArg(offset, 'column');\n\t\n\t if (offsetLine < lastOffset.line ||\n\t (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {\n\t throw new Error('Section offsets must be ordered and non-overlapping.');\n\t }\n\t lastOffset = offset;\n\t\n\t return {\n\t generatedOffset: {\n\t // The offset fields are 0-based, but we use 1-based indices when\n\t // encoding/decoding from VLQ.\n\t generatedLine: offsetLine + 1,\n\t generatedColumn: offsetColumn + 1\n\t },\n\t consumer: new SourceMapConsumer(util.getArg(s, 'map'))\n\t }\n\t });\n\t}\n\t\n\tIndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\n\tIndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;\n\t\n\t/**\n\t * The version of the source mapping spec that we are consuming.\n\t */\n\tIndexedSourceMapConsumer.prototype._version = 3;\n\t\n\t/**\n\t * The list of original sources.\n\t */\n\tObject.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {\n\t get: function () {\n\t var sources = [];\n\t for (var i = 0; i < this._sections.length; i++) {\n\t for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {\n\t 
sources.push(this._sections[i].consumer.sources[j]);\n\t }\n\t }\n\t return sources;\n\t }\n\t});\n\t\n\t/**\n\t * Returns the original source, line, and column information for the generated\n\t * source's line and column positions provided. The only argument is an object\n\t * with the following properties:\n\t *\n\t * - line: The line number in the generated source.\n\t * - column: The column number in the generated source.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - source: The original source file, or null.\n\t * - line: The line number in the original source, or null.\n\t * - column: The column number in the original source, or null.\n\t * - name: The original identifier, or null.\n\t */\n\tIndexedSourceMapConsumer.prototype.originalPositionFor =\n\t function IndexedSourceMapConsumer_originalPositionFor(aArgs) {\n\t var needle = {\n\t generatedLine: util.getArg(aArgs, 'line'),\n\t generatedColumn: util.getArg(aArgs, 'column')\n\t };\n\t\n\t // Find the section containing the generated position we're trying to map\n\t // to an original position.\n\t var sectionIndex = binarySearch.search(needle, this._sections,\n\t function(needle, section) {\n\t var cmp = needle.generatedLine - section.generatedOffset.generatedLine;\n\t if (cmp) {\n\t return cmp;\n\t }\n\t\n\t return (needle.generatedColumn -\n\t section.generatedOffset.generatedColumn);\n\t });\n\t var section = this._sections[sectionIndex];\n\t\n\t if (!section) {\n\t return {\n\t source: null,\n\t line: null,\n\t column: null,\n\t name: null\n\t };\n\t }\n\t\n\t return section.consumer.originalPositionFor({\n\t line: needle.generatedLine -\n\t (section.generatedOffset.generatedLine - 1),\n\t column: needle.generatedColumn -\n\t (section.generatedOffset.generatedLine === needle.generatedLine\n\t ? section.generatedOffset.generatedColumn - 1\n\t : 0),\n\t bias: aArgs.bias\n\t });\n\t };\n\t\n\t/**\n\t * Return true if we have the source content for every source in the source\n\t * map, false otherwise.\n\t */\n\tIndexedSourceMapConsumer.prototype.hasContentsOfAllSources =\n\t function IndexedSourceMapConsumer_hasContentsOfAllSources() {\n\t return this._sections.every(function (s) {\n\t return s.consumer.hasContentsOfAllSources();\n\t });\n\t };\n\t\n\t/**\n\t * Returns the original source content. The only argument is the url of the\n\t * original source file. Returns null if no original source content is\n\t * available.\n\t */\n\tIndexedSourceMapConsumer.prototype.sourceContentFor =\n\t function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n\t for (var i = 0; i < this._sections.length; i++) {\n\t var section = this._sections[i];\n\t\n\t var content = section.consumer.sourceContentFor(aSource, true);\n\t if (content) {\n\t return content;\n\t }\n\t }\n\t if (nullOnMissing) {\n\t return null;\n\t }\n\t else {\n\t throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n\t }\n\t };\n\t\n\t/**\n\t * Returns the generated line and column information for the original source,\n\t * line, and column positions provided. 
The only argument is an object with\n\t * the following properties:\n\t *\n\t * - source: The filename of the original source.\n\t * - line: The line number in the original source.\n\t * - column: The column number in the original source.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - line: The line number in the generated source, or null.\n\t * - column: The column number in the generated source, or null.\n\t */\n\tIndexedSourceMapConsumer.prototype.generatedPositionFor =\n\t function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {\n\t for (var i = 0; i < this._sections.length; i++) {\n\t var section = this._sections[i];\n\t\n\t // Only consider this section if the requested source is in the list of\n\t // sources of the consumer.\n\t if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) {\n\t continue;\n\t }\n\t var generatedPosition = section.consumer.generatedPositionFor(aArgs);\n\t if (generatedPosition) {\n\t var ret = {\n\t line: generatedPosition.line +\n\t (section.generatedOffset.generatedLine - 1),\n\t column: generatedPosition.column +\n\t (section.generatedOffset.generatedLine === generatedPosition.line\n\t ? section.generatedOffset.generatedColumn - 1\n\t : 0)\n\t };\n\t return ret;\n\t }\n\t }\n\t\n\t return {\n\t line: null,\n\t column: null\n\t };\n\t };\n\t\n\t/**\n\t * Parse the mappings in a string in to a data structure which we can easily\n\t * query (the ordered arrays in the `this.__generatedMappings` and\n\t * `this.__originalMappings` properties).\n\t */\n\tIndexedSourceMapConsumer.prototype._parseMappings =\n\t function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n\t this.__generatedMappings = [];\n\t this.__originalMappings = [];\n\t for (var i = 0; i < this._sections.length; i++) {\n\t var section = this._sections[i];\n\t var sectionMappings = section.consumer._generatedMappings;\n\t for (var j = 0; j < sectionMappings.length; j++) {\n\t var mapping = sectionMappings[j];\n\t\n\t var source = section.consumer._sources.at(mapping.source);\n\t if (section.consumer.sourceRoot !== null) {\n\t source = util.join(section.consumer.sourceRoot, source);\n\t }\n\t this._sources.add(source);\n\t source = this._sources.indexOf(source);\n\t\n\t var name = section.consumer._names.at(mapping.name);\n\t this._names.add(name);\n\t name = this._names.indexOf(name);\n\t\n\t // The mappings coming from the consumer for the section have\n\t // generated positions relative to the start of the section, so we\n\t // need to offset them to be relative to the start of the concatenated\n\t // generated file.\n\t var adjustedMapping = {\n\t source: source,\n\t generatedLine: mapping.generatedLine +\n\t (section.generatedOffset.generatedLine - 1),\n\t generatedColumn: mapping.generatedColumn +\n\t (section.generatedOffset.generatedLine === mapping.generatedLine\n\t ? 
section.generatedOffset.generatedColumn - 1\n\t : 0),\n\t originalLine: mapping.originalLine,\n\t originalColumn: mapping.originalColumn,\n\t name: name\n\t };\n\t\n\t this.__generatedMappings.push(adjustedMapping);\n\t if (typeof adjustedMapping.originalLine === 'number') {\n\t this.__originalMappings.push(adjustedMapping);\n\t }\n\t }\n\t }\n\t\n\t quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated);\n\t quickSort(this.__originalMappings, util.compareByOriginalPositions);\n\t };\n\t\n\texports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;\n\n\n/***/ }),\n/* 8 */\n/***/ (function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\texports.GREATEST_LOWER_BOUND = 1;\n\texports.LEAST_UPPER_BOUND = 2;\n\t\n\t/**\n\t * Recursive implementation of binary search.\n\t *\n\t * @param aLow Indices here and lower do not contain the needle.\n\t * @param aHigh Indices here and higher do not contain the needle.\n\t * @param aNeedle The element being searched for.\n\t * @param aHaystack The non-empty array being searched.\n\t * @param aCompare Function which takes two elements and returns -1, 0, or 1.\n\t * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n\t * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t */\n\tfunction recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {\n\t // This function terminates when one of the following is true:\n\t //\n\t // 1. We find the exact element we are looking for.\n\t //\n\t // 2. We did not find the exact element, but we can return the index of\n\t // the next-closest element.\n\t //\n\t // 3. We did not find the exact element, and there is no next-closest\n\t // element than the one we are searching for, so we return -1.\n\t var mid = Math.floor((aHigh - aLow) / 2) + aLow;\n\t var cmp = aCompare(aNeedle, aHaystack[mid], true);\n\t if (cmp === 0) {\n\t // Found the element we are looking for.\n\t return mid;\n\t }\n\t else if (cmp > 0) {\n\t // Our needle is greater than aHaystack[mid].\n\t if (aHigh - mid > 1) {\n\t // The element is in the upper half.\n\t return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);\n\t }\n\t\n\t // The exact needle element was not found in this haystack. Determine if\n\t // we are in termination case (3) or (2) and return the appropriate thing.\n\t if (aBias == exports.LEAST_UPPER_BOUND) {\n\t return aHigh < aHaystack.length ? aHigh : -1;\n\t } else {\n\t return mid;\n\t }\n\t }\n\t else {\n\t // Our needle is less than aHaystack[mid].\n\t if (mid - aLow > 1) {\n\t // The element is in the lower half.\n\t return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);\n\t }\n\t\n\t // we are in termination case (3) or (2) and return the appropriate thing.\n\t if (aBias == exports.LEAST_UPPER_BOUND) {\n\t return mid;\n\t } else {\n\t return aLow < 0 ? -1 : aLow;\n\t }\n\t }\n\t}\n\t\n\t/**\n\t * This is an implementation of binary search which will always try and return\n\t * the index of the closest element if there is no exact hit. 
This is because\n\t * mappings between original and generated line/col pairs are single points,\n\t * and there is an implicit region between each of them, so a miss just means\n\t * that you aren't on the very start of a region.\n\t *\n\t * @param aNeedle The element you are looking for.\n\t * @param aHaystack The array that is being searched.\n\t * @param aCompare A function which takes the needle and an element in the\n\t * array and returns -1, 0, or 1 depending on whether the needle is less\n\t * than, equal to, or greater than the element, respectively.\n\t * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n\t * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.\n\t */\n\texports.search = function search(aNeedle, aHaystack, aCompare, aBias) {\n\t if (aHaystack.length === 0) {\n\t return -1;\n\t }\n\t\n\t var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,\n\t aCompare, aBias || exports.GREATEST_LOWER_BOUND);\n\t if (index < 0) {\n\t return -1;\n\t }\n\t\n\t // We have found either the exact element, or the next-closest element than\n\t // the one we are searching for. However, there may be more than one such\n\t // element. Make sure we always return the smallest of these.\n\t while (index - 1 >= 0) {\n\t if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {\n\t break;\n\t }\n\t --index;\n\t }\n\t\n\t return index;\n\t};\n\n\n/***/ }),\n/* 9 */\n/***/ (function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\t// It turns out that some (most?) JavaScript engines don't self-host\n\t// `Array.prototype.sort`. This makes sense because C++ will likely remain\n\t// faster than JS when doing raw CPU-intensive sorting. However, when using a\n\t// custom comparator function, calling back and forth between the VM's C++ and\n\t// JIT'd JS is rather slow *and* loses JIT type information, resulting in\n\t// worse generated code for the comparator function than would be optimal. In\n\t// fact, when sorting with a comparator, these costs outweigh the benefits of\n\t// sorting in C++. By using our own JS-implemented Quick Sort (below), we get\n\t// a ~3500ms mean speed-up in `bench/bench.html`.\n\t\n\t/**\n\t * Swap the elements indexed by `x` and `y` in the array `ary`.\n\t *\n\t * @param {Array} ary\n\t * The array.\n\t * @param {Number} x\n\t * The index of the first item.\n\t * @param {Number} y\n\t * The index of the second item.\n\t */\n\tfunction swap(ary, x, y) {\n\t var temp = ary[x];\n\t ary[x] = ary[y];\n\t ary[y] = temp;\n\t}\n\t\n\t/**\n\t * Returns a random integer within the range `low .. 
high` inclusive.\n\t *\n\t * @param {Number} low\n\t * The lower bound on the range.\n\t * @param {Number} high\n\t * The upper bound on the range.\n\t */\n\tfunction randomIntInRange(low, high) {\n\t return Math.round(low + (Math.random() * (high - low)));\n\t}\n\t\n\t/**\n\t * The Quick Sort algorithm.\n\t *\n\t * @param {Array} ary\n\t * An array to sort.\n\t * @param {function} comparator\n\t * Function to use to compare two items.\n\t * @param {Number} p\n\t * Start index of the array\n\t * @param {Number} r\n\t * End index of the array\n\t */\n\tfunction doQuickSort(ary, comparator, p, r) {\n\t // If our lower bound is less than our upper bound, we (1) partition the\n\t // array into two pieces and (2) recurse on each half. If it is not, this is\n\t // the empty array and our base case.\n\t\n\t if (p < r) {\n\t // (1) Partitioning.\n\t //\n\t // The partitioning chooses a pivot between `p` and `r` and moves all\n\t // elements that are less than or equal to the pivot to the before it, and\n\t // all the elements that are greater than it after it. The effect is that\n\t // once partition is done, the pivot is in the exact place it will be when\n\t // the array is put in sorted order, and it will not need to be moved\n\t // again. This runs in O(n) time.\n\t\n\t // Always choose a random pivot so that an input array which is reverse\n\t // sorted does not cause O(n^2) running time.\n\t var pivotIndex = randomIntInRange(p, r);\n\t var i = p - 1;\n\t\n\t swap(ary, pivotIndex, r);\n\t var pivot = ary[r];\n\t\n\t // Immediately after `j` is incremented in this loop, the following hold\n\t // true:\n\t //\n\t // * Every element in `ary[p .. i]` is less than or equal to the pivot.\n\t //\n\t // * Every element in `ary[i+1 .. j-1]` is greater than the pivot.\n\t for (var j = p; j < r; j++) {\n\t if (comparator(ary[j], pivot) <= 0) {\n\t i += 1;\n\t swap(ary, i, j);\n\t }\n\t }\n\t\n\t swap(ary, i + 1, j);\n\t var q = i + 1;\n\t\n\t // (2) Recurse on each half.\n\t\n\t doQuickSort(ary, comparator, p, q - 1);\n\t doQuickSort(ary, comparator, q + 1, r);\n\t }\n\t}\n\t\n\t/**\n\t * Sort the given array in-place with the given comparator function.\n\t *\n\t * @param {Array} ary\n\t * An array to sort.\n\t * @param {function} comparator\n\t * Function to use to compare two items.\n\t */\n\texports.quickSort = function (ary, comparator) {\n\t doQuickSort(ary, comparator, 0, ary.length - 1);\n\t};\n\n\n/***/ }),\n/* 10 */\n/***/ (function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar SourceMapGenerator = __webpack_require__(1).SourceMapGenerator;\n\tvar util = __webpack_require__(4);\n\t\n\t// Matches a Windows-style `\\r\\n` newline or a `\\n` newline used by all other\n\t// operating systems these days (capturing the result).\n\tvar REGEX_NEWLINE = /(\\r?\\n)/;\n\t\n\t// Newline character code for charCodeAt() comparisons\n\tvar NEWLINE_CODE = 10;\n\t\n\t// Private symbol for identifying `SourceNode`s when multiple versions of\n\t// the source-map library are loaded. 
This MUST NOT CHANGE across\n\t// versions!\n\tvar isSourceNode = \"$$$isSourceNode$$$\";\n\t\n\t/**\n\t * SourceNodes provide a way to abstract over interpolating/concatenating\n\t * snippets of generated JavaScript source code while maintaining the line and\n\t * column information associated with the original source code.\n\t *\n\t * @param aLine The original line number.\n\t * @param aColumn The original column number.\n\t * @param aSource The original source's filename.\n\t * @param aChunks Optional. An array of strings which are snippets of\n\t * generated JS, or other SourceNodes.\n\t * @param aName The original identifier.\n\t */\n\tfunction SourceNode(aLine, aColumn, aSource, aChunks, aName) {\n\t this.children = [];\n\t this.sourceContents = {};\n\t this.line = aLine == null ? null : aLine;\n\t this.column = aColumn == null ? null : aColumn;\n\t this.source = aSource == null ? null : aSource;\n\t this.name = aName == null ? null : aName;\n\t this[isSourceNode] = true;\n\t if (aChunks != null) this.add(aChunks);\n\t}\n\t\n\t/**\n\t * Creates a SourceNode from generated code and a SourceMapConsumer.\n\t *\n\t * @param aGeneratedCode The generated code\n\t * @param aSourceMapConsumer The SourceMap for the generated code\n\t * @param aRelativePath Optional. The path that relative sources in the\n\t * SourceMapConsumer should be relative to.\n\t */\n\tSourceNode.fromStringWithSourceMap =\n\t function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {\n\t // The SourceNode we want to fill with the generated code\n\t // and the SourceMap\n\t var node = new SourceNode();\n\t\n\t // All even indices of this array are one line of the generated code,\n\t // while all odd indices are the newlines between two adjacent lines\n\t // (since `REGEX_NEWLINE` captures its match).\n\t // Processed fragments are accessed by calling `shiftNextLine`.\n\t var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);\n\t var remainingLinesIndex = 0;\n\t var shiftNextLine = function() {\n\t var lineContents = getNextLine();\n\t // The last line of a file might not have a newline.\n\t var newLine = getNextLine() || \"\";\n\t return lineContents + newLine;\n\t\n\t function getNextLine() {\n\t return remainingLinesIndex < remainingLines.length ?\n\t remainingLines[remainingLinesIndex++] : undefined;\n\t }\n\t };\n\t\n\t // We need to remember the position of \"remainingLines\"\n\t var lastGeneratedLine = 1, lastGeneratedColumn = 0;\n\t\n\t // The generate SourceNodes we need a code range.\n\t // To extract it current and last mapping is used.\n\t // Here we store the last mapping.\n\t var lastMapping = null;\n\t\n\t aSourceMapConsumer.eachMapping(function (mapping) {\n\t if (lastMapping !== null) {\n\t // We add the code from \"lastMapping\" to \"mapping\":\n\t // First check if there is a new line in between.\n\t if (lastGeneratedLine < mapping.generatedLine) {\n\t // Associate first line with \"lastMapping\"\n\t addMappingWithCode(lastMapping, shiftNextLine());\n\t lastGeneratedLine++;\n\t lastGeneratedColumn = 0;\n\t // The remaining code is added without mapping\n\t } else {\n\t // There is no new line in between.\n\t // Associate the code between \"lastGeneratedColumn\" and\n\t // \"mapping.generatedColumn\" with \"lastMapping\"\n\t var nextLine = remainingLines[remainingLinesIndex];\n\t var code = nextLine.substr(0, mapping.generatedColumn -\n\t lastGeneratedColumn);\n\t remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn -\n\t 
lastGeneratedColumn);\n\t lastGeneratedColumn = mapping.generatedColumn;\n\t addMappingWithCode(lastMapping, code);\n\t // No more remaining code, continue\n\t lastMapping = mapping;\n\t return;\n\t }\n\t }\n\t // We add the generated code until the first mapping\n\t // to the SourceNode without any mapping.\n\t // Each line is added as separate string.\n\t while (lastGeneratedLine < mapping.generatedLine) {\n\t node.add(shiftNextLine());\n\t lastGeneratedLine++;\n\t }\n\t if (lastGeneratedColumn < mapping.generatedColumn) {\n\t var nextLine = remainingLines[remainingLinesIndex];\n\t node.add(nextLine.substr(0, mapping.generatedColumn));\n\t remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn);\n\t lastGeneratedColumn = mapping.generatedColumn;\n\t }\n\t lastMapping = mapping;\n\t }, this);\n\t // We have processed all mappings.\n\t if (remainingLinesIndex < remainingLines.length) {\n\t if (lastMapping) {\n\t // Associate the remaining code in the current line with \"lastMapping\"\n\t addMappingWithCode(lastMapping, shiftNextLine());\n\t }\n\t // and add the remaining lines without any mapping\n\t node.add(remainingLines.splice(remainingLinesIndex).join(\"\"));\n\t }\n\t\n\t // Copy sourcesContent into SourceNode\n\t aSourceMapConsumer.sources.forEach(function (sourceFile) {\n\t var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n\t if (content != null) {\n\t if (aRelativePath != null) {\n\t sourceFile = util.join(aRelativePath, sourceFile);\n\t }\n\t node.setSourceContent(sourceFile, content);\n\t }\n\t });\n\t\n\t return node;\n\t\n\t function addMappingWithCode(mapping, code) {\n\t if (mapping === null || mapping.source === undefined) {\n\t node.add(code);\n\t } else {\n\t var source = aRelativePath\n\t ? util.join(aRelativePath, mapping.source)\n\t : mapping.source;\n\t node.add(new SourceNode(mapping.originalLine,\n\t mapping.originalColumn,\n\t source,\n\t code,\n\t mapping.name));\n\t }\n\t }\n\t };\n\t\n\t/**\n\t * Add a chunk of generated JS to this source node.\n\t *\n\t * @param aChunk A string snippet of generated JS code, another instance of\n\t * SourceNode, or an array where each member is one of those things.\n\t */\n\tSourceNode.prototype.add = function SourceNode_add(aChunk) {\n\t if (Array.isArray(aChunk)) {\n\t aChunk.forEach(function (chunk) {\n\t this.add(chunk);\n\t }, this);\n\t }\n\t else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n\t if (aChunk) {\n\t this.children.push(aChunk);\n\t }\n\t }\n\t else {\n\t throw new TypeError(\n\t \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n\t );\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Add a chunk of generated JS to the beginning of this source node.\n\t *\n\t * @param aChunk A string snippet of generated JS code, another instance of\n\t * SourceNode, or an array where each member is one of those things.\n\t */\n\tSourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {\n\t if (Array.isArray(aChunk)) {\n\t for (var i = aChunk.length-1; i >= 0; i--) {\n\t this.prepend(aChunk[i]);\n\t }\n\t }\n\t else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n\t this.children.unshift(aChunk);\n\t }\n\t else {\n\t throw new TypeError(\n\t \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n\t );\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Walk over the tree of JS snippets in this node and its children. 
The\n\t * walking function is called once for each snippet of JS and is passed that\n\t * snippet and the its original associated source's line/column location.\n\t *\n\t * @param aFn The traversal function.\n\t */\n\tSourceNode.prototype.walk = function SourceNode_walk(aFn) {\n\t var chunk;\n\t for (var i = 0, len = this.children.length; i < len; i++) {\n\t chunk = this.children[i];\n\t if (chunk[isSourceNode]) {\n\t chunk.walk(aFn);\n\t }\n\t else {\n\t if (chunk !== '') {\n\t aFn(chunk, { source: this.source,\n\t line: this.line,\n\t column: this.column,\n\t name: this.name });\n\t }\n\t }\n\t }\n\t};\n\t\n\t/**\n\t * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between\n\t * each of `this.children`.\n\t *\n\t * @param aSep The separator.\n\t */\n\tSourceNode.prototype.join = function SourceNode_join(aSep) {\n\t var newChildren;\n\t var i;\n\t var len = this.children.length;\n\t if (len > 0) {\n\t newChildren = [];\n\t for (i = 0; i < len-1; i++) {\n\t newChildren.push(this.children[i]);\n\t newChildren.push(aSep);\n\t }\n\t newChildren.push(this.children[i]);\n\t this.children = newChildren;\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Call String.prototype.replace on the very right-most source snippet. Useful\n\t * for trimming whitespace from the end of a source node, etc.\n\t *\n\t * @param aPattern The pattern to replace.\n\t * @param aReplacement The thing to replace the pattern with.\n\t */\n\tSourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {\n\t var lastChild = this.children[this.children.length - 1];\n\t if (lastChild[isSourceNode]) {\n\t lastChild.replaceRight(aPattern, aReplacement);\n\t }\n\t else if (typeof lastChild === 'string') {\n\t this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);\n\t }\n\t else {\n\t this.children.push(''.replace(aPattern, aReplacement));\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Set the source content for a source file. This will be added to the SourceMapGenerator\n\t * in the sourcesContent field.\n\t *\n\t * @param aSourceFile The filename of the source file\n\t * @param aSourceContent The content of the source file\n\t */\n\tSourceNode.prototype.setSourceContent =\n\t function SourceNode_setSourceContent(aSourceFile, aSourceContent) {\n\t this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;\n\t };\n\t\n\t/**\n\t * Walk over the tree of SourceNodes. The walking function is called for each\n\t * source file content and is passed the filename and source content.\n\t *\n\t * @param aFn The traversal function.\n\t */\n\tSourceNode.prototype.walkSourceContents =\n\t function SourceNode_walkSourceContents(aFn) {\n\t for (var i = 0, len = this.children.length; i < len; i++) {\n\t if (this.children[i][isSourceNode]) {\n\t this.children[i].walkSourceContents(aFn);\n\t }\n\t }\n\t\n\t var sources = Object.keys(this.sourceContents);\n\t for (var i = 0, len = sources.length; i < len; i++) {\n\t aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);\n\t }\n\t };\n\t\n\t/**\n\t * Return the string representation of this source node. 
Walks over the tree\n\t * and concatenates all the various snippets together to one string.\n\t */\n\tSourceNode.prototype.toString = function SourceNode_toString() {\n\t var str = \"\";\n\t this.walk(function (chunk) {\n\t str += chunk;\n\t });\n\t return str;\n\t};\n\t\n\t/**\n\t * Returns the string representation of this source node along with a source\n\t * map.\n\t */\n\tSourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {\n\t var generated = {\n\t code: \"\",\n\t line: 1,\n\t column: 0\n\t };\n\t var map = new SourceMapGenerator(aArgs);\n\t var sourceMappingActive = false;\n\t var lastOriginalSource = null;\n\t var lastOriginalLine = null;\n\t var lastOriginalColumn = null;\n\t var lastOriginalName = null;\n\t this.walk(function (chunk, original) {\n\t generated.code += chunk;\n\t if (original.source !== null\n\t && original.line !== null\n\t && original.column !== null) {\n\t if(lastOriginalSource !== original.source\n\t || lastOriginalLine !== original.line\n\t || lastOriginalColumn !== original.column\n\t || lastOriginalName !== original.name) {\n\t map.addMapping({\n\t source: original.source,\n\t original: {\n\t line: original.line,\n\t column: original.column\n\t },\n\t generated: {\n\t line: generated.line,\n\t column: generated.column\n\t },\n\t name: original.name\n\t });\n\t }\n\t lastOriginalSource = original.source;\n\t lastOriginalLine = original.line;\n\t lastOriginalColumn = original.column;\n\t lastOriginalName = original.name;\n\t sourceMappingActive = true;\n\t } else if (sourceMappingActive) {\n\t map.addMapping({\n\t generated: {\n\t line: generated.line,\n\t column: generated.column\n\t }\n\t });\n\t lastOriginalSource = null;\n\t sourceMappingActive = false;\n\t }\n\t for (var idx = 0, length = chunk.length; idx < length; idx++) {\n\t if (chunk.charCodeAt(idx) === NEWLINE_CODE) {\n\t generated.line++;\n\t generated.column = 0;\n\t // Mappings end at eol\n\t if (idx + 1 === length) {\n\t lastOriginalSource = null;\n\t sourceMappingActive = false;\n\t } else if (sourceMappingActive) {\n\t map.addMapping({\n\t source: original.source,\n\t original: {\n\t line: original.line,\n\t column: original.column\n\t },\n\t generated: {\n\t line: generated.line,\n\t column: generated.column\n\t },\n\t name: original.name\n\t });\n\t }\n\t } else {\n\t generated.column++;\n\t }\n\t }\n\t });\n\t this.walkSourceContents(function (sourceFile, sourceContent) {\n\t map.setSourceContent(sourceFile, sourceContent);\n\t });\n\t\n\t return { code: generated.code, map: map };\n\t};\n\t\n\texports.SourceNode = SourceNode;\n\n\n/***/ })\n/******/ ])\n});\n;\n\n\n// WEBPACK FOOTER //\n// source-map.min.js"," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId])\n \t\t\treturn installedModules[moduleId].exports;\n\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\texports: {},\n \t\t\tid: moduleId,\n \t\t\tloaded: false\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.loaded = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n 
\t__webpack_require__.c = installedModules;\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(0);\n\n\n\n// WEBPACK FOOTER //\n// webpack/bootstrap 42c329f865e32e011afb","/*\n * Copyright 2009-2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE.txt or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\nexports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator;\nexports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer;\nexports.SourceNode = require('./lib/source-node').SourceNode;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./source-map.js\n// module id = 0\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar base64VLQ = require('./base64-vlq');\nvar util = require('./util');\nvar ArraySet = require('./array-set').ArraySet;\nvar MappingList = require('./mapping-list').MappingList;\n\n/**\n * An instance of the SourceMapGenerator represents a source map which is\n * being built incrementally. You may pass an object with the following\n * properties:\n *\n * - file: The filename of the generated source.\n * - sourceRoot: A root for all relative URLs in this source map.\n */\nfunction SourceMapGenerator(aArgs) {\n if (!aArgs) {\n aArgs = {};\n }\n this._file = util.getArg(aArgs, 'file', null);\n this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);\n this._skipValidation = util.getArg(aArgs, 'skipValidation', false);\n this._sources = new ArraySet();\n this._names = new ArraySet();\n this._mappings = new MappingList();\n this._sourcesContents = null;\n}\n\nSourceMapGenerator.prototype._version = 3;\n\n/**\n * Creates a new SourceMapGenerator based on a SourceMapConsumer\n *\n * @param aSourceMapConsumer The SourceMap.\n */\nSourceMapGenerator.fromSourceMap =\n function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {\n var sourceRoot = aSourceMapConsumer.sourceRoot;\n var generator = new SourceMapGenerator({\n file: aSourceMapConsumer.file,\n sourceRoot: sourceRoot\n });\n aSourceMapConsumer.eachMapping(function (mapping) {\n var newMapping = {\n generated: {\n line: mapping.generatedLine,\n column: mapping.generatedColumn\n }\n };\n\n if (mapping.source != null) {\n newMapping.source = mapping.source;\n if (sourceRoot != null) {\n newMapping.source = util.relative(sourceRoot, newMapping.source);\n }\n\n newMapping.original = {\n line: mapping.originalLine,\n column: mapping.originalColumn\n };\n\n if (mapping.name != null) {\n newMapping.name = mapping.name;\n }\n }\n\n generator.addMapping(newMapping);\n });\n aSourceMapConsumer.sources.forEach(function (sourceFile) {\n var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n if (content != null) {\n generator.setSourceContent(sourceFile, content);\n }\n });\n return generator;\n };\n\n/**\n * Add a single mapping from original source line and column to the generated\n * source's line and column for this source map being created. 
The mapping\n * object should have the following properties:\n *\n * - generated: An object with the generated line and column positions.\n * - original: An object with the original line and column positions.\n * - source: The original source file (relative to the sourceRoot).\n * - name: An optional original token name for this mapping.\n */\nSourceMapGenerator.prototype.addMapping =\n function SourceMapGenerator_addMapping(aArgs) {\n var generated = util.getArg(aArgs, 'generated');\n var original = util.getArg(aArgs, 'original', null);\n var source = util.getArg(aArgs, 'source', null);\n var name = util.getArg(aArgs, 'name', null);\n\n if (!this._skipValidation) {\n this._validateMapping(generated, original, source, name);\n }\n\n if (source != null) {\n source = String(source);\n if (!this._sources.has(source)) {\n this._sources.add(source);\n }\n }\n\n if (name != null) {\n name = String(name);\n if (!this._names.has(name)) {\n this._names.add(name);\n }\n }\n\n this._mappings.add({\n generatedLine: generated.line,\n generatedColumn: generated.column,\n originalLine: original != null && original.line,\n originalColumn: original != null && original.column,\n source: source,\n name: name\n });\n };\n\n/**\n * Set the source content for a source file.\n */\nSourceMapGenerator.prototype.setSourceContent =\n function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {\n var source = aSourceFile;\n if (this._sourceRoot != null) {\n source = util.relative(this._sourceRoot, source);\n }\n\n if (aSourceContent != null) {\n // Add the source content to the _sourcesContents map.\n // Create a new _sourcesContents map if the property is null.\n if (!this._sourcesContents) {\n this._sourcesContents = Object.create(null);\n }\n this._sourcesContents[util.toSetString(source)] = aSourceContent;\n } else if (this._sourcesContents) {\n // Remove the source file from the _sourcesContents map.\n // If the _sourcesContents map is empty, set the property to null.\n delete this._sourcesContents[util.toSetString(source)];\n if (Object.keys(this._sourcesContents).length === 0) {\n this._sourcesContents = null;\n }\n }\n };\n\n/**\n * Applies the mappings of a sub-source-map for a specific source file to the\n * source map being generated. Each mapping to the supplied source file is\n * rewritten using the supplied source map. Note: The resolution for the\n * resulting mappings is the minimium of this map and the supplied map.\n *\n * @param aSourceMapConsumer The source map to be applied.\n * @param aSourceFile Optional. The filename of the source file.\n * If omitted, SourceMapConsumer's file property will be used.\n * @param aSourceMapPath Optional. The dirname of the path to the source map\n * to be applied. If relative, it is relative to the SourceMapConsumer.\n * This parameter is needed when the two source maps aren't in the same\n * directory, and the source map to be applied contains relative source\n * paths. If so, those relative source paths need to be rewritten\n * relative to the SourceMapGenerator.\n */\nSourceMapGenerator.prototype.applySourceMap =\n function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {\n var sourceFile = aSourceFile;\n // If aSourceFile is omitted, we will use the file property of the SourceMap\n if (aSourceFile == null) {\n if (aSourceMapConsumer.file == null) {\n throw new Error(\n 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +\n 'or the source map\\'s \"file\" property. 
Both were omitted.'\n );\n }\n sourceFile = aSourceMapConsumer.file;\n }\n var sourceRoot = this._sourceRoot;\n // Make \"sourceFile\" relative if an absolute Url is passed.\n if (sourceRoot != null) {\n sourceFile = util.relative(sourceRoot, sourceFile);\n }\n // Applying the SourceMap can add and remove items from the sources and\n // the names array.\n var newSources = new ArraySet();\n var newNames = new ArraySet();\n\n // Find mappings for the \"sourceFile\"\n this._mappings.unsortedForEach(function (mapping) {\n if (mapping.source === sourceFile && mapping.originalLine != null) {\n // Check if it can be mapped by the source map, then update the mapping.\n var original = aSourceMapConsumer.originalPositionFor({\n line: mapping.originalLine,\n column: mapping.originalColumn\n });\n if (original.source != null) {\n // Copy mapping\n mapping.source = original.source;\n if (aSourceMapPath != null) {\n mapping.source = util.join(aSourceMapPath, mapping.source)\n }\n if (sourceRoot != null) {\n mapping.source = util.relative(sourceRoot, mapping.source);\n }\n mapping.originalLine = original.line;\n mapping.originalColumn = original.column;\n if (original.name != null) {\n mapping.name = original.name;\n }\n }\n }\n\n var source = mapping.source;\n if (source != null && !newSources.has(source)) {\n newSources.add(source);\n }\n\n var name = mapping.name;\n if (name != null && !newNames.has(name)) {\n newNames.add(name);\n }\n\n }, this);\n this._sources = newSources;\n this._names = newNames;\n\n // Copy sourcesContents of applied map.\n aSourceMapConsumer.sources.forEach(function (sourceFile) {\n var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n if (content != null) {\n if (aSourceMapPath != null) {\n sourceFile = util.join(aSourceMapPath, sourceFile);\n }\n if (sourceRoot != null) {\n sourceFile = util.relative(sourceRoot, sourceFile);\n }\n this.setSourceContent(sourceFile, content);\n }\n }, this);\n };\n\n/**\n * A mapping can have one of the three levels of data:\n *\n * 1. Just the generated position.\n * 2. The Generated position, original position, and original source.\n * 3. Generated and original position, original source, as well as a name\n * token.\n *\n * To maintain consistency, we validate that any new mapping being added falls\n * in to one of these categories.\n */\nSourceMapGenerator.prototype._validateMapping =\n function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,\n aName) {\n // When aOriginal is truthy but has empty values for .line and .column,\n // it is most likely a programmer error. In this case we throw a very\n // specific error message to try to guide them the right way.\n // For example: https://github.com/Polymer/polymer-bundler/pull/519\n if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') {\n throw new Error(\n 'original.line and original.column are not numbers -- you probably meant to omit ' +\n 'the original mapping entirely and only map the generated position. 
If so, pass ' +\n 'null for the original mapping instead of an object with empty or null values.'\n );\n }\n\n if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n && aGenerated.line > 0 && aGenerated.column >= 0\n && !aOriginal && !aSource && !aName) {\n // Case 1.\n return;\n }\n else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n && aOriginal && 'line' in aOriginal && 'column' in aOriginal\n && aGenerated.line > 0 && aGenerated.column >= 0\n && aOriginal.line > 0 && aOriginal.column >= 0\n && aSource) {\n // Cases 2 and 3.\n return;\n }\n else {\n throw new Error('Invalid mapping: ' + JSON.stringify({\n generated: aGenerated,\n source: aSource,\n original: aOriginal,\n name: aName\n }));\n }\n };\n\n/**\n * Serialize the accumulated mappings in to the stream of base 64 VLQs\n * specified by the source map format.\n */\nSourceMapGenerator.prototype._serializeMappings =\n function SourceMapGenerator_serializeMappings() {\n var previousGeneratedColumn = 0;\n var previousGeneratedLine = 1;\n var previousOriginalColumn = 0;\n var previousOriginalLine = 0;\n var previousName = 0;\n var previousSource = 0;\n var result = '';\n var next;\n var mapping;\n var nameIdx;\n var sourceIdx;\n\n var mappings = this._mappings.toArray();\n for (var i = 0, len = mappings.length; i < len; i++) {\n mapping = mappings[i];\n next = ''\n\n if (mapping.generatedLine !== previousGeneratedLine) {\n previousGeneratedColumn = 0;\n while (mapping.generatedLine !== previousGeneratedLine) {\n next += ';';\n previousGeneratedLine++;\n }\n }\n else {\n if (i > 0) {\n if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {\n continue;\n }\n next += ',';\n }\n }\n\n next += base64VLQ.encode(mapping.generatedColumn\n - previousGeneratedColumn);\n previousGeneratedColumn = mapping.generatedColumn;\n\n if (mapping.source != null) {\n sourceIdx = this._sources.indexOf(mapping.source);\n next += base64VLQ.encode(sourceIdx - previousSource);\n previousSource = sourceIdx;\n\n // lines are stored 0-based in SourceMap spec version 3\n next += base64VLQ.encode(mapping.originalLine - 1\n - previousOriginalLine);\n previousOriginalLine = mapping.originalLine - 1;\n\n next += base64VLQ.encode(mapping.originalColumn\n - previousOriginalColumn);\n previousOriginalColumn = mapping.originalColumn;\n\n if (mapping.name != null) {\n nameIdx = this._names.indexOf(mapping.name);\n next += base64VLQ.encode(nameIdx - previousName);\n previousName = nameIdx;\n }\n }\n\n result += next;\n }\n\n return result;\n };\n\nSourceMapGenerator.prototype._generateSourcesContent =\n function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {\n return aSources.map(function (source) {\n if (!this._sourcesContents) {\n return null;\n }\n if (aSourceRoot != null) {\n source = util.relative(aSourceRoot, source);\n }\n var key = util.toSetString(source);\n return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)\n ? 
this._sourcesContents[key]\n : null;\n }, this);\n };\n\n/**\n * Externalize the source map.\n */\nSourceMapGenerator.prototype.toJSON =\n function SourceMapGenerator_toJSON() {\n var map = {\n version: this._version,\n sources: this._sources.toArray(),\n names: this._names.toArray(),\n mappings: this._serializeMappings()\n };\n if (this._file != null) {\n map.file = this._file;\n }\n if (this._sourceRoot != null) {\n map.sourceRoot = this._sourceRoot;\n }\n if (this._sourcesContents) {\n map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);\n }\n\n return map;\n };\n\n/**\n * Render the source map being generated to a string.\n */\nSourceMapGenerator.prototype.toString =\n function SourceMapGenerator_toString() {\n return JSON.stringify(this.toJSON());\n };\n\nexports.SourceMapGenerator = SourceMapGenerator;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-map-generator.js\n// module id = 1\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n *\n * Based on the Base 64 VLQ implementation in Closure Compiler:\n * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java\n *\n * Copyright 2011 The Closure Compiler Authors. All rights reserved.\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are\n * met:\n *\n * * Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * * Redistributions in binary form must reproduce the above\n * copyright notice, this list of conditions and the following\n * disclaimer in the documentation and/or other materials provided\n * with the distribution.\n * * Neither the name of Google Inc. nor the names of its\n * contributors may be used to endorse or promote products derived\n * from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n * \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n\nvar base64 = require('./base64');\n\n// A single base 64 digit can contain 6 bits of data. For the base 64 variable\n// length quantities we use in the source map spec, the first bit is the sign,\n// the next four bits are the actual value, and the 6th bit is the\n// continuation bit. 
The continuation bit tells us whether there are more\n// digits in this value following this digit.\n//\n// Continuation\n// | Sign\n// | |\n// V V\n// 101011\n\nvar VLQ_BASE_SHIFT = 5;\n\n// binary: 100000\nvar VLQ_BASE = 1 << VLQ_BASE_SHIFT;\n\n// binary: 011111\nvar VLQ_BASE_MASK = VLQ_BASE - 1;\n\n// binary: 100000\nvar VLQ_CONTINUATION_BIT = VLQ_BASE;\n\n/**\n * Converts from a two-complement value to a value where the sign bit is\n * placed in the least significant bit. For example, as decimals:\n * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)\n * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)\n */\nfunction toVLQSigned(aValue) {\n return aValue < 0\n ? ((-aValue) << 1) + 1\n : (aValue << 1) + 0;\n}\n\n/**\n * Converts to a two-complement value from a value where the sign bit is\n * placed in the least significant bit. For example, as decimals:\n * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1\n * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2\n */\nfunction fromVLQSigned(aValue) {\n var isNegative = (aValue & 1) === 1;\n var shifted = aValue >> 1;\n return isNegative\n ? -shifted\n : shifted;\n}\n\n/**\n * Returns the base 64 VLQ encoded value.\n */\nexports.encode = function base64VLQ_encode(aValue) {\n var encoded = \"\";\n var digit;\n\n var vlq = toVLQSigned(aValue);\n\n do {\n digit = vlq & VLQ_BASE_MASK;\n vlq >>>= VLQ_BASE_SHIFT;\n if (vlq > 0) {\n // There are still more digits in this value, so we must make sure the\n // continuation bit is marked.\n digit |= VLQ_CONTINUATION_BIT;\n }\n encoded += base64.encode(digit);\n } while (vlq > 0);\n\n return encoded;\n};\n\n/**\n * Decodes the next base 64 VLQ value from the given string and returns the\n * value and the rest of the string via the out parameter.\n */\nexports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {\n var strLen = aStr.length;\n var result = 0;\n var shift = 0;\n var continuation, digit;\n\n do {\n if (aIndex >= strLen) {\n throw new Error(\"Expected more digits in base 64 VLQ value.\");\n }\n\n digit = base64.decode(aStr.charCodeAt(aIndex++));\n if (digit === -1) {\n throw new Error(\"Invalid base64 digit: \" + aStr.charAt(aIndex - 1));\n }\n\n continuation = !!(digit & VLQ_CONTINUATION_BIT);\n digit &= VLQ_BASE_MASK;\n result = result + (digit << shift);\n shift += VLQ_BASE_SHIFT;\n } while (continuation);\n\n aOutParam.value = fromVLQSigned(result);\n aOutParam.rest = aIndex;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/base64-vlq.js\n// module id = 2\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');\n\n/**\n * Encode an integer in the range of 0 to 63 to a single base 64 digit.\n */\nexports.encode = function (number) {\n if (0 <= number && number < intToCharMap.length) {\n return intToCharMap[number];\n }\n throw new TypeError(\"Must be between 0 and 63: \" + number);\n};\n\n/**\n * Decode a single base 64 character code digit to an integer. 
Returns -1 on\n * failure.\n */\nexports.decode = function (charCode) {\n var bigA = 65; // 'A'\n var bigZ = 90; // 'Z'\n\n var littleA = 97; // 'a'\n var littleZ = 122; // 'z'\n\n var zero = 48; // '0'\n var nine = 57; // '9'\n\n var plus = 43; // '+'\n var slash = 47; // '/'\n\n var littleOffset = 26;\n var numberOffset = 52;\n\n // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ\n if (bigA <= charCode && charCode <= bigZ) {\n return (charCode - bigA);\n }\n\n // 26 - 51: abcdefghijklmnopqrstuvwxyz\n if (littleA <= charCode && charCode <= littleZ) {\n return (charCode - littleA + littleOffset);\n }\n\n // 52 - 61: 0123456789\n if (zero <= charCode && charCode <= nine) {\n return (charCode - zero + numberOffset);\n }\n\n // 62: +\n if (charCode == plus) {\n return 62;\n }\n\n // 63: /\n if (charCode == slash) {\n return 63;\n }\n\n // Invalid base64 digit.\n return -1;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/base64.js\n// module id = 3\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n/**\n * This is a helper function for getting values from parameter/options\n * objects.\n *\n * @param args The object we are extracting values from\n * @param name The name of the property we are getting.\n * @param defaultValue An optional value to return if the property is missing\n * from the object. If this is not specified and the property is missing, an\n * error will be thrown.\n */\nfunction getArg(aArgs, aName, aDefaultValue) {\n if (aName in aArgs) {\n return aArgs[aName];\n } else if (arguments.length === 3) {\n return aDefaultValue;\n } else {\n throw new Error('\"' + aName + '\" is a required argument.');\n }\n}\nexports.getArg = getArg;\n\nvar urlRegexp = /^(?:([\\w+\\-.]+):)?\\/\\/(?:(\\w+:\\w+)@)?([\\w.]*)(?::(\\d+))?(\\S*)$/;\nvar dataUrlRegexp = /^data:.+\\,.+$/;\n\nfunction urlParse(aUrl) {\n var match = aUrl.match(urlRegexp);\n if (!match) {\n return null;\n }\n return {\n scheme: match[1],\n auth: match[2],\n host: match[3],\n port: match[4],\n path: match[5]\n };\n}\nexports.urlParse = urlParse;\n\nfunction urlGenerate(aParsedUrl) {\n var url = '';\n if (aParsedUrl.scheme) {\n url += aParsedUrl.scheme + ':';\n }\n url += '//';\n if (aParsedUrl.auth) {\n url += aParsedUrl.auth + '@';\n }\n if (aParsedUrl.host) {\n url += aParsedUrl.host;\n }\n if (aParsedUrl.port) {\n url += \":\" + aParsedUrl.port\n }\n if (aParsedUrl.path) {\n url += aParsedUrl.path;\n }\n return url;\n}\nexports.urlGenerate = urlGenerate;\n\n/**\n * Normalizes a path, or the path portion of a URL:\n *\n * - Replaces consecutive slashes with one slash.\n * - Removes unnecessary '.' parts.\n * - Removes unnecessary '/..' parts.\n *\n * Based on code in the Node.js 'path' core module.\n *\n * @param aPath The path or url to normalize.\n */\nfunction normalize(aPath) {\n var path = aPath;\n var url = urlParse(aPath);\n if (url) {\n if (!url.path) {\n return aPath;\n }\n path = url.path;\n }\n var isAbsolute = exports.isAbsolute(path);\n\n var parts = path.split(/\\/+/);\n for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {\n part = parts[i];\n if (part === '.') {\n parts.splice(i, 1);\n } else if (part === '..') {\n up++;\n } else if (up > 0) {\n if (part === '') {\n // The first part is blank if the path is absolute. Trying to go\n // above the root is a no-op. Therefore we can remove all '..' 
parts\n // directly after the root.\n parts.splice(i + 1, up);\n up = 0;\n } else {\n parts.splice(i, 2);\n up--;\n }\n }\n }\n path = parts.join('/');\n\n if (path === '') {\n path = isAbsolute ? '/' : '.';\n }\n\n if (url) {\n url.path = path;\n return urlGenerate(url);\n }\n return path;\n}\nexports.normalize = normalize;\n\n/**\n * Joins two paths/URLs.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be joined with the root.\n *\n * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a\n * scheme-relative URL: Then the scheme of aRoot, if any, is prepended\n * first.\n * - Otherwise aPath is a path. If aRoot is a URL, then its path portion\n * is updated with the result and aRoot is returned. Otherwise the result\n * is returned.\n * - If aPath is absolute, the result is aPath.\n * - Otherwise the two paths are joined with a slash.\n * - Joining for example 'http://' and 'www.example.com' is also supported.\n */\nfunction join(aRoot, aPath) {\n if (aRoot === \"\") {\n aRoot = \".\";\n }\n if (aPath === \"\") {\n aPath = \".\";\n }\n var aPathUrl = urlParse(aPath);\n var aRootUrl = urlParse(aRoot);\n if (aRootUrl) {\n aRoot = aRootUrl.path || '/';\n }\n\n // `join(foo, '//www.example.org')`\n if (aPathUrl && !aPathUrl.scheme) {\n if (aRootUrl) {\n aPathUrl.scheme = aRootUrl.scheme;\n }\n return urlGenerate(aPathUrl);\n }\n\n if (aPathUrl || aPath.match(dataUrlRegexp)) {\n return aPath;\n }\n\n // `join('http://', 'www.example.com')`\n if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {\n aRootUrl.host = aPath;\n return urlGenerate(aRootUrl);\n }\n\n var joined = aPath.charAt(0) === '/'\n ? aPath\n : normalize(aRoot.replace(/\\/+$/, '') + '/' + aPath);\n\n if (aRootUrl) {\n aRootUrl.path = joined;\n return urlGenerate(aRootUrl);\n }\n return joined;\n}\nexports.join = join;\n\nexports.isAbsolute = function (aPath) {\n return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp);\n};\n\n/**\n * Make a path relative to a URL or another path.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be made relative to aRoot.\n */\nfunction relative(aRoot, aPath) {\n if (aRoot === \"\") {\n aRoot = \".\";\n }\n\n aRoot = aRoot.replace(/\\/$/, '');\n\n // It is possible for the path to be above the root. In this case, simply\n // checking whether the root is a prefix of the path won't work. Instead, we\n // need to remove components from the root one by one, until either we find\n // a prefix that fits, or we run out of components to remove.\n var level = 0;\n while (aPath.indexOf(aRoot + '/') !== 0) {\n var index = aRoot.lastIndexOf(\"/\");\n if (index < 0) {\n return aPath;\n }\n\n // If the only part of the root that is left is the scheme (i.e. 
http://,\n // file:///, etc.), one or more slashes (/), or simply nothing at all, we\n // have exhausted all components, so the path is not relative to the root.\n aRoot = aRoot.slice(0, index);\n if (aRoot.match(/^([^\\/]+:\\/)?\\/*$/)) {\n return aPath;\n }\n\n ++level;\n }\n\n // Make sure we add a \"../\" for each component we removed from the root.\n return Array(level + 1).join(\"../\") + aPath.substr(aRoot.length + 1);\n}\nexports.relative = relative;\n\nvar supportsNullProto = (function () {\n var obj = Object.create(null);\n return !('__proto__' in obj);\n}());\n\nfunction identity (s) {\n return s;\n}\n\n/**\n * Because behavior goes wacky when you set `__proto__` on objects, we\n * have to prefix all the strings in our set with an arbitrary character.\n *\n * See https://github.com/mozilla/source-map/pull/31 and\n * https://github.com/mozilla/source-map/issues/30\n *\n * @param String aStr\n */\nfunction toSetString(aStr) {\n if (isProtoString(aStr)) {\n return '$' + aStr;\n }\n\n return aStr;\n}\nexports.toSetString = supportsNullProto ? identity : toSetString;\n\nfunction fromSetString(aStr) {\n if (isProtoString(aStr)) {\n return aStr.slice(1);\n }\n\n return aStr;\n}\nexports.fromSetString = supportsNullProto ? identity : fromSetString;\n\nfunction isProtoString(s) {\n if (!s) {\n return false;\n }\n\n var length = s.length;\n\n if (length < 9 /* \"__proto__\".length */) {\n return false;\n }\n\n if (s.charCodeAt(length - 1) !== 95 /* '_' */ ||\n s.charCodeAt(length - 2) !== 95 /* '_' */ ||\n s.charCodeAt(length - 3) !== 111 /* 'o' */ ||\n s.charCodeAt(length - 4) !== 116 /* 't' */ ||\n s.charCodeAt(length - 5) !== 111 /* 'o' */ ||\n s.charCodeAt(length - 6) !== 114 /* 'r' */ ||\n s.charCodeAt(length - 7) !== 112 /* 'p' */ ||\n s.charCodeAt(length - 8) !== 95 /* '_' */ ||\n s.charCodeAt(length - 9) !== 95 /* '_' */) {\n return false;\n }\n\n for (var i = length - 10; i >= 0; i--) {\n if (s.charCodeAt(i) !== 36 /* '$' */) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * Comparator between two mappings where the original positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same original source/line/column, but different generated\n * line and column the same. Useful when searching for a mapping with a\n * stubbed out mapping.\n */\nfunction compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {\n var cmp = mappingA.source - mappingB.source;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalLine - mappingB.originalLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalColumn - mappingB.originalColumn;\n if (cmp !== 0 || onlyCompareOriginal) {\n return cmp;\n }\n\n cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.generatedLine - mappingB.generatedLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n return mappingA.name - mappingB.name;\n}\nexports.compareByOriginalPositions = compareByOriginalPositions;\n\n/**\n * Comparator between two mappings with deflated source and name indices where\n * the generated positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same generated line and column, but different\n * source/name/original line and column the same. 
Useful when searching for a\n * mapping with a stubbed out mapping.\n */\nfunction compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {\n var cmp = mappingA.generatedLine - mappingB.generatedLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n if (cmp !== 0 || onlyCompareGenerated) {\n return cmp;\n }\n\n cmp = mappingA.source - mappingB.source;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalLine - mappingB.originalLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalColumn - mappingB.originalColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n return mappingA.name - mappingB.name;\n}\nexports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;\n\nfunction strcmp(aStr1, aStr2) {\n if (aStr1 === aStr2) {\n return 0;\n }\n\n if (aStr1 > aStr2) {\n return 1;\n }\n\n return -1;\n}\n\n/**\n * Comparator between two mappings with inflated source and name strings where\n * the generated positions are compared.\n */\nfunction compareByGeneratedPositionsInflated(mappingA, mappingB) {\n var cmp = mappingA.generatedLine - mappingB.generatedLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = strcmp(mappingA.source, mappingB.source);\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalLine - mappingB.originalLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalColumn - mappingB.originalColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n return strcmp(mappingA.name, mappingB.name);\n}\nexports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/util.js\n// module id = 4\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar has = Object.prototype.hasOwnProperty;\nvar hasNativeMap = typeof Map !== \"undefined\";\n\n/**\n * A data structure which is a combination of an array and a set. Adding a new\n * member is O(1), testing for membership is O(1), and finding the index of an\n * element is O(1). Removing elements from the set is not supported. Only\n * strings are supported for membership.\n */\nfunction ArraySet() {\n this._array = [];\n this._set = hasNativeMap ? new Map() : Object.create(null);\n}\n\n/**\n * Static method for creating ArraySet instances from an existing array.\n */\nArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {\n var set = new ArraySet();\n for (var i = 0, len = aArray.length; i < len; i++) {\n set.add(aArray[i], aAllowDuplicates);\n }\n return set;\n};\n\n/**\n * Return how many unique items are in this ArraySet. If duplicates have been\n * added, than those do not count towards the size.\n *\n * @returns Number\n */\nArraySet.prototype.size = function ArraySet_size() {\n return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length;\n};\n\n/**\n * Add the given string to this set.\n *\n * @param String aStr\n */\nArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {\n var sStr = hasNativeMap ? aStr : util.toSetString(aStr);\n var isDuplicate = hasNativeMap ? 
this.has(aStr) : has.call(this._set, sStr);\n var idx = this._array.length;\n if (!isDuplicate || aAllowDuplicates) {\n this._array.push(aStr);\n }\n if (!isDuplicate) {\n if (hasNativeMap) {\n this._set.set(aStr, idx);\n } else {\n this._set[sStr] = idx;\n }\n }\n};\n\n/**\n * Is the given string a member of this set?\n *\n * @param String aStr\n */\nArraySet.prototype.has = function ArraySet_has(aStr) {\n if (hasNativeMap) {\n return this._set.has(aStr);\n } else {\n var sStr = util.toSetString(aStr);\n return has.call(this._set, sStr);\n }\n};\n\n/**\n * What is the index of the given string in the array?\n *\n * @param String aStr\n */\nArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {\n if (hasNativeMap) {\n var idx = this._set.get(aStr);\n if (idx >= 0) {\n return idx;\n }\n } else {\n var sStr = util.toSetString(aStr);\n if (has.call(this._set, sStr)) {\n return this._set[sStr];\n }\n }\n\n throw new Error('\"' + aStr + '\" is not in the set.');\n};\n\n/**\n * What is the element at the given index?\n *\n * @param Number aIdx\n */\nArraySet.prototype.at = function ArraySet_at(aIdx) {\n if (aIdx >= 0 && aIdx < this._array.length) {\n return this._array[aIdx];\n }\n throw new Error('No element indexed by ' + aIdx);\n};\n\n/**\n * Returns the array representation of this set (which has the proper indices\n * indicated by indexOf). Note that this is a copy of the internal array used\n * for storing the members so that no one can mess with internal state.\n */\nArraySet.prototype.toArray = function ArraySet_toArray() {\n return this._array.slice();\n};\n\nexports.ArraySet = ArraySet;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/array-set.js\n// module id = 5\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2014 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\n\n/**\n * Determine whether mappingB is after mappingA with respect to generated\n * position.\n */\nfunction generatedPositionAfter(mappingA, mappingB) {\n // Optimized for most common case\n var lineA = mappingA.generatedLine;\n var lineB = mappingB.generatedLine;\n var columnA = mappingA.generatedColumn;\n var columnB = mappingB.generatedColumn;\n return lineB > lineA || lineB == lineA && columnB >= columnA ||\n util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;\n}\n\n/**\n * A data structure to provide a sorted view of accumulated mappings in a\n * performance conscious manner. It trades a neglibable overhead in general\n * case for a large speedup in case of mappings being added in order.\n */\nfunction MappingList() {\n this._array = [];\n this._sorted = true;\n // Serves as infimum\n this._last = {generatedLine: -1, generatedColumn: 0};\n}\n\n/**\n * Iterate through internal items. 
This method takes the same arguments that\n * `Array.prototype.forEach` takes.\n *\n * NOTE: The order of the mappings is NOT guaranteed.\n */\nMappingList.prototype.unsortedForEach =\n function MappingList_forEach(aCallback, aThisArg) {\n this._array.forEach(aCallback, aThisArg);\n };\n\n/**\n * Add the given source mapping.\n *\n * @param Object aMapping\n */\nMappingList.prototype.add = function MappingList_add(aMapping) {\n if (generatedPositionAfter(this._last, aMapping)) {\n this._last = aMapping;\n this._array.push(aMapping);\n } else {\n this._sorted = false;\n this._array.push(aMapping);\n }\n};\n\n/**\n * Returns the flat, sorted array of mappings. The mappings are sorted by\n * generated position.\n *\n * WARNING: This method returns internal data without copying, for\n * performance. The return value must NOT be mutated, and should be treated as\n * an immutable borrow. If you want to take ownership, you must make your own\n * copy.\n */\nMappingList.prototype.toArray = function MappingList_toArray() {\n if (!this._sorted) {\n this._array.sort(util.compareByGeneratedPositionsInflated);\n this._sorted = true;\n }\n return this._array;\n};\n\nexports.MappingList = MappingList;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/mapping-list.js\n// module id = 6\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar binarySearch = require('./binary-search');\nvar ArraySet = require('./array-set').ArraySet;\nvar base64VLQ = require('./base64-vlq');\nvar quickSort = require('./quick-sort').quickSort;\n\nfunction SourceMapConsumer(aSourceMap) {\n var sourceMap = aSourceMap;\n if (typeof aSourceMap === 'string') {\n sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n }\n\n return sourceMap.sections != null\n ? new IndexedSourceMapConsumer(sourceMap)\n : new BasicSourceMapConsumer(sourceMap);\n}\n\nSourceMapConsumer.fromSourceMap = function(aSourceMap) {\n return BasicSourceMapConsumer.fromSourceMap(aSourceMap);\n}\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nSourceMapConsumer.prototype._version = 3;\n\n// `__generatedMappings` and `__originalMappings` are arrays that hold the\n// parsed mapping coordinates from the source map's \"mappings\" attribute. They\n// are lazily instantiated, accessed via the `_generatedMappings` and\n// `_originalMappings` getters respectively, and we only parse the mappings\n// and create these arrays once queried for a source location. 
We jump through\n// these hoops because there can be many thousands of mappings, and parsing\n// them is expensive, so we only want to do it if we must.\n//\n// Each object in the arrays is of the form:\n//\n// {\n// generatedLine: The line number in the generated code,\n// generatedColumn: The column number in the generated code,\n// source: The path to the original source file that generated this\n// chunk of code,\n// originalLine: The line number in the original source that\n// corresponds to this chunk of generated code,\n// originalColumn: The column number in the original source that\n// corresponds to this chunk of generated code,\n// name: The name of the original symbol which generated this chunk of\n// code.\n// }\n//\n// All properties except for `generatedLine` and `generatedColumn` can be\n// `null`.\n//\n// `_generatedMappings` is ordered by the generated positions.\n//\n// `_originalMappings` is ordered by the original positions.\n\nSourceMapConsumer.prototype.__generatedMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {\n get: function () {\n if (!this.__generatedMappings) {\n this._parseMappings(this._mappings, this.sourceRoot);\n }\n\n return this.__generatedMappings;\n }\n});\n\nSourceMapConsumer.prototype.__originalMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {\n get: function () {\n if (!this.__originalMappings) {\n this._parseMappings(this._mappings, this.sourceRoot);\n }\n\n return this.__originalMappings;\n }\n});\n\nSourceMapConsumer.prototype._charIsMappingSeparator =\n function SourceMapConsumer_charIsMappingSeparator(aStr, index) {\n var c = aStr.charAt(index);\n return c === \";\" || c === \",\";\n };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nSourceMapConsumer.prototype._parseMappings =\n function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n throw new Error(\"Subclasses must implement _parseMappings\");\n };\n\nSourceMapConsumer.GENERATED_ORDER = 1;\nSourceMapConsumer.ORIGINAL_ORDER = 2;\n\nSourceMapConsumer.GREATEST_LOWER_BOUND = 1;\nSourceMapConsumer.LEAST_UPPER_BOUND = 2;\n\n/**\n * Iterate over each mapping between an original source/line/column and a\n * generated line/column in this source map.\n *\n * @param Function aCallback\n * The function that is called with each mapping.\n * @param Object aContext\n * Optional. If specified, this object will be the value of `this` every\n * time that `aCallback` is called.\n * @param aOrder\n * Either `SourceMapConsumer.GENERATED_ORDER` or\n * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to\n * iterate over the mappings sorted by the generated file's line/column\n * order or the original's source/line/column order, respectively. 
Defaults to\n * `SourceMapConsumer.GENERATED_ORDER`.\n */\nSourceMapConsumer.prototype.eachMapping =\n function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {\n var context = aContext || null;\n var order = aOrder || SourceMapConsumer.GENERATED_ORDER;\n\n var mappings;\n switch (order) {\n case SourceMapConsumer.GENERATED_ORDER:\n mappings = this._generatedMappings;\n break;\n case SourceMapConsumer.ORIGINAL_ORDER:\n mappings = this._originalMappings;\n break;\n default:\n throw new Error(\"Unknown order of iteration.\");\n }\n\n var sourceRoot = this.sourceRoot;\n mappings.map(function (mapping) {\n var source = mapping.source === null ? null : this._sources.at(mapping.source);\n if (source != null && sourceRoot != null) {\n source = util.join(sourceRoot, source);\n }\n return {\n source: source,\n generatedLine: mapping.generatedLine,\n generatedColumn: mapping.generatedColumn,\n originalLine: mapping.originalLine,\n originalColumn: mapping.originalColumn,\n name: mapping.name === null ? null : this._names.at(mapping.name)\n };\n }, this).forEach(aCallback, context);\n };\n\n/**\n * Returns all generated line and column information for the original source,\n * line, and column provided. If no column is provided, returns all mappings\n * corresponding to a either the line we are searching for or the next\n * closest line that has any mappings. Otherwise, returns all mappings\n * corresponding to the given line and either the column we are searching for\n * or the next closest column that has any offsets.\n *\n * The only argument is an object with the following properties:\n *\n * - source: The filename of the original source.\n * - line: The line number in the original source.\n * - column: Optional. the column number in the original source.\n *\n * and an array of objects is returned, each with the following properties:\n *\n * - line: The line number in the generated source, or null.\n * - column: The column number in the generated source, or null.\n */\nSourceMapConsumer.prototype.allGeneratedPositionsFor =\n function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {\n var line = util.getArg(aArgs, 'line');\n\n // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping\n // returns the index of the closest mapping less than the needle. By\n // setting needle.originalColumn to 0, we thus find the last mapping for\n // the given line, provided such a mapping exists.\n var needle = {\n source: util.getArg(aArgs, 'source'),\n originalLine: line,\n originalColumn: util.getArg(aArgs, 'column', 0)\n };\n\n if (this.sourceRoot != null) {\n needle.source = util.relative(this.sourceRoot, needle.source);\n }\n if (!this._sources.has(needle.source)) {\n return [];\n }\n needle.source = this._sources.indexOf(needle.source);\n\n var mappings = [];\n\n var index = this._findMapping(needle,\n this._originalMappings,\n \"originalLine\",\n \"originalColumn\",\n util.compareByOriginalPositions,\n binarySearch.LEAST_UPPER_BOUND);\n if (index >= 0) {\n var mapping = this._originalMappings[index];\n\n if (aArgs.column === undefined) {\n var originalLine = mapping.originalLine;\n\n // Iterate until either we run out of mappings, or we run into\n // a mapping for a different line than the one we found. 
Since\n // mappings are sorted, this is guaranteed to find all mappings for\n // the line we found.\n while (mapping && mapping.originalLine === originalLine) {\n mappings.push({\n line: util.getArg(mapping, 'generatedLine', null),\n column: util.getArg(mapping, 'generatedColumn', null),\n lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n });\n\n mapping = this._originalMappings[++index];\n }\n } else {\n var originalColumn = mapping.originalColumn;\n\n // Iterate until either we run out of mappings, or we run into\n // a mapping for a different line than the one we were searching for.\n // Since mappings are sorted, this is guaranteed to find all mappings for\n // the line we are searching for.\n while (mapping &&\n mapping.originalLine === line &&\n mapping.originalColumn == originalColumn) {\n mappings.push({\n line: util.getArg(mapping, 'generatedLine', null),\n column: util.getArg(mapping, 'generatedColumn', null),\n lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n });\n\n mapping = this._originalMappings[++index];\n }\n }\n }\n\n return mappings;\n };\n\nexports.SourceMapConsumer = SourceMapConsumer;\n\n/**\n * A BasicSourceMapConsumer instance represents a parsed source map which we can\n * query for information about the original file positions by giving it a file\n * position in the generated source.\n *\n * The only parameter is the raw source map (either as a JSON string, or\n * already parsed to an object). According to the spec, source maps have the\n * following attributes:\n *\n * - version: Which version of the source map spec this map is following.\n * - sources: An array of URLs to the original source files.\n * - names: An array of identifiers which can be referrenced by individual mappings.\n * - sourceRoot: Optional. The URL root from which all sources are relative.\n * - sourcesContent: Optional. An array of contents of the original source files.\n * - mappings: A string of base64 VLQs which contain the actual mappings.\n * - file: Optional. The generated file this source map is associated with.\n *\n * Here is an example source map, taken from the source map spec[0]:\n *\n * {\n * version : 3,\n * file: \"out.js\",\n * sourceRoot : \"\",\n * sources: [\"foo.js\", \"bar.js\"],\n * names: [\"src\", \"maps\", \"are\", \"fun\"],\n * mappings: \"AA,AB;;ABCDE;\"\n * }\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#\n */\nfunction BasicSourceMapConsumer(aSourceMap) {\n var sourceMap = aSourceMap;\n if (typeof aSourceMap === 'string') {\n sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n }\n\n var version = util.getArg(sourceMap, 'version');\n var sources = util.getArg(sourceMap, 'sources');\n // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which\n // requires the array) to play nice here.\n var names = util.getArg(sourceMap, 'names', []);\n var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);\n var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);\n var mappings = util.getArg(sourceMap, 'mappings');\n var file = util.getArg(sourceMap, 'file', null);\n\n // Once again, Sass deviates from the spec and supplies the version as a\n // string rather than a number, so we use loose equality checking here.\n if (version != this._version) {\n throw new Error('Unsupported version: ' + version);\n }\n\n sources = sources\n .map(String)\n // Some source maps produce relative source paths like \"./foo.js\" instead of\n // \"foo.js\". 
Normalize these first so that future comparisons will succeed.\n // See bugzil.la/1090768.\n .map(util.normalize)\n // Always ensure that absolute sources are internally stored relative to\n // the source root, if the source root is absolute. Not doing this would\n // be particularly problematic when the source root is a prefix of the\n // source (valid, but why??). See github issue #199 and bugzil.la/1188982.\n .map(function (source) {\n return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source)\n ? util.relative(sourceRoot, source)\n : source;\n });\n\n // Pass `true` below to allow duplicate names and sources. While source maps\n // are intended to be compressed and deduplicated, the TypeScript compiler\n // sometimes generates source maps with duplicates in them. See Github issue\n // #72 and bugzil.la/889492.\n this._names = ArraySet.fromArray(names.map(String), true);\n this._sources = ArraySet.fromArray(sources, true);\n\n this.sourceRoot = sourceRoot;\n this.sourcesContent = sourcesContent;\n this._mappings = mappings;\n this.file = file;\n}\n\nBasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nBasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;\n\n/**\n * Create a BasicSourceMapConsumer from a SourceMapGenerator.\n *\n * @param SourceMapGenerator aSourceMap\n * The source map that will be consumed.\n * @returns BasicSourceMapConsumer\n */\nBasicSourceMapConsumer.fromSourceMap =\n function SourceMapConsumer_fromSourceMap(aSourceMap) {\n var smc = Object.create(BasicSourceMapConsumer.prototype);\n\n var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);\n var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);\n smc.sourceRoot = aSourceMap._sourceRoot;\n smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),\n smc.sourceRoot);\n smc.file = aSourceMap._file;\n\n // Because we are modifying the entries (by converting string sources and\n // names to indices into the sources and names ArraySets), we have to make\n // a copy of the entry or else bad things happen. Shared mutable state\n // strikes again! See github issue #191.\n\n var generatedMappings = aSourceMap._mappings.toArray().slice();\n var destGeneratedMappings = smc.__generatedMappings = [];\n var destOriginalMappings = smc.__originalMappings = [];\n\n for (var i = 0, length = generatedMappings.length; i < length; i++) {\n var srcMapping = generatedMappings[i];\n var destMapping = new Mapping;\n destMapping.generatedLine = srcMapping.generatedLine;\n destMapping.generatedColumn = srcMapping.generatedColumn;\n\n if (srcMapping.source) {\n destMapping.source = sources.indexOf(srcMapping.source);\n destMapping.originalLine = srcMapping.originalLine;\n destMapping.originalColumn = srcMapping.originalColumn;\n\n if (srcMapping.name) {\n destMapping.name = names.indexOf(srcMapping.name);\n }\n\n destOriginalMappings.push(destMapping);\n }\n\n destGeneratedMappings.push(destMapping);\n }\n\n quickSort(smc.__originalMappings, util.compareByOriginalPositions);\n\n return smc;\n };\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nBasicSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {\n get: function () {\n return this._sources.toArray().map(function (s) {\n return this.sourceRoot != null ? 
util.join(this.sourceRoot, s) : s;\n }, this);\n }\n});\n\n/**\n * Provide the JIT with a nice shape / hidden class.\n */\nfunction Mapping() {\n this.generatedLine = 0;\n this.generatedColumn = 0;\n this.source = null;\n this.originalLine = null;\n this.originalColumn = null;\n this.name = null;\n}\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nBasicSourceMapConsumer.prototype._parseMappings =\n function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n var generatedLine = 1;\n var previousGeneratedColumn = 0;\n var previousOriginalLine = 0;\n var previousOriginalColumn = 0;\n var previousSource = 0;\n var previousName = 0;\n var length = aStr.length;\n var index = 0;\n var cachedSegments = {};\n var temp = {};\n var originalMappings = [];\n var generatedMappings = [];\n var mapping, str, segment, end, value;\n\n while (index < length) {\n if (aStr.charAt(index) === ';') {\n generatedLine++;\n index++;\n previousGeneratedColumn = 0;\n }\n else if (aStr.charAt(index) === ',') {\n index++;\n }\n else {\n mapping = new Mapping();\n mapping.generatedLine = generatedLine;\n\n // Because each offset is encoded relative to the previous one,\n // many segments often have the same encoding. We can exploit this\n // fact by caching the parsed variable length fields of each segment,\n // allowing us to avoid a second parse if we encounter the same\n // segment again.\n for (end = index; end < length; end++) {\n if (this._charIsMappingSeparator(aStr, end)) {\n break;\n }\n }\n str = aStr.slice(index, end);\n\n segment = cachedSegments[str];\n if (segment) {\n index += str.length;\n } else {\n segment = [];\n while (index < end) {\n base64VLQ.decode(aStr, index, temp);\n value = temp.value;\n index = temp.rest;\n segment.push(value);\n }\n\n if (segment.length === 2) {\n throw new Error('Found a source, but no line and column');\n }\n\n if (segment.length === 3) {\n throw new Error('Found a source and line, but no column');\n }\n\n cachedSegments[str] = segment;\n }\n\n // Generated column.\n mapping.generatedColumn = previousGeneratedColumn + segment[0];\n previousGeneratedColumn = mapping.generatedColumn;\n\n if (segment.length > 1) {\n // Original source.\n mapping.source = previousSource + segment[1];\n previousSource += segment[1];\n\n // Original line.\n mapping.originalLine = previousOriginalLine + segment[2];\n previousOriginalLine = mapping.originalLine;\n // Lines are stored 0-based\n mapping.originalLine += 1;\n\n // Original column.\n mapping.originalColumn = previousOriginalColumn + segment[3];\n previousOriginalColumn = mapping.originalColumn;\n\n if (segment.length > 4) {\n // Original name.\n mapping.name = previousName + segment[4];\n previousName += segment[4];\n }\n }\n\n generatedMappings.push(mapping);\n if (typeof mapping.originalLine === 'number') {\n originalMappings.push(mapping);\n }\n }\n }\n\n quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated);\n this.__generatedMappings = generatedMappings;\n\n quickSort(originalMappings, util.compareByOriginalPositions);\n this.__originalMappings = originalMappings;\n };\n\n/**\n * Find the mapping that best matches the hypothetical \"needle\" mapping that\n * we are searching for in the given \"haystack\" of mappings.\n */\nBasicSourceMapConsumer.prototype._findMapping =\n function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,\n aColumnName, 
aComparator, aBias) {\n // To return the position we are searching for, we must first find the\n // mapping for the given position and then return the opposite position it\n // points to. Because the mappings are sorted, we can use binary search to\n // find the best mapping.\n\n if (aNeedle[aLineName] <= 0) {\n throw new TypeError('Line must be greater than or equal to 1, got '\n + aNeedle[aLineName]);\n }\n if (aNeedle[aColumnName] < 0) {\n throw new TypeError('Column must be greater than or equal to 0, got '\n + aNeedle[aColumnName]);\n }\n\n return binarySearch.search(aNeedle, aMappings, aComparator, aBias);\n };\n\n/**\n * Compute the last column for each generated mapping. The last column is\n * inclusive.\n */\nBasicSourceMapConsumer.prototype.computeColumnSpans =\n function SourceMapConsumer_computeColumnSpans() {\n for (var index = 0; index < this._generatedMappings.length; ++index) {\n var mapping = this._generatedMappings[index];\n\n // Mappings do not contain a field for the last generated columnt. We\n // can come up with an optimistic estimate, however, by assuming that\n // mappings are contiguous (i.e. given two consecutive mappings, the\n // first mapping ends where the second one starts).\n if (index + 1 < this._generatedMappings.length) {\n var nextMapping = this._generatedMappings[index + 1];\n\n if (mapping.generatedLine === nextMapping.generatedLine) {\n mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;\n continue;\n }\n }\n\n // The last mapping for each line spans the entire line.\n mapping.lastGeneratedColumn = Infinity;\n }\n };\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. The only argument is an object\n * with the following properties:\n *\n * - line: The line number in the generated source.\n * - column: The column number in the generated source.\n * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n * 'SourceMapConsumer.LEAST_UPPER_BOUND'. 
Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n * - source: The original source file, or null.\n * - line: The line number in the original source, or null.\n * - column: The column number in the original source, or null.\n * - name: The original identifier, or null.\n */\nBasicSourceMapConsumer.prototype.originalPositionFor =\n function SourceMapConsumer_originalPositionFor(aArgs) {\n var needle = {\n generatedLine: util.getArg(aArgs, 'line'),\n generatedColumn: util.getArg(aArgs, 'column')\n };\n\n var index = this._findMapping(\n needle,\n this._generatedMappings,\n \"generatedLine\",\n \"generatedColumn\",\n util.compareByGeneratedPositionsDeflated,\n util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n );\n\n if (index >= 0) {\n var mapping = this._generatedMappings[index];\n\n if (mapping.generatedLine === needle.generatedLine) {\n var source = util.getArg(mapping, 'source', null);\n if (source !== null) {\n source = this._sources.at(source);\n if (this.sourceRoot != null) {\n source = util.join(this.sourceRoot, source);\n }\n }\n var name = util.getArg(mapping, 'name', null);\n if (name !== null) {\n name = this._names.at(name);\n }\n return {\n source: source,\n line: util.getArg(mapping, 'originalLine', null),\n column: util.getArg(mapping, 'originalColumn', null),\n name: name\n };\n }\n }\n\n return {\n source: null,\n line: null,\n column: null,\n name: null\n };\n };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nBasicSourceMapConsumer.prototype.hasContentsOfAllSources =\n function BasicSourceMapConsumer_hasContentsOfAllSources() {\n if (!this.sourcesContent) {\n return false;\n }\n return this.sourcesContent.length >= this._sources.size() &&\n !this.sourcesContent.some(function (sc) { return sc == null; });\n };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nBasicSourceMapConsumer.prototype.sourceContentFor =\n function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n if (!this.sourcesContent) {\n return null;\n }\n\n if (this.sourceRoot != null) {\n aSource = util.relative(this.sourceRoot, aSource);\n }\n\n if (this._sources.has(aSource)) {\n return this.sourcesContent[this._sources.indexOf(aSource)];\n }\n\n var url;\n if (this.sourceRoot != null\n && (url = util.urlParse(this.sourceRoot))) {\n // XXX: file:// URIs and absolute paths lead to unexpected behavior for\n // many users. We can help them out when they expect file:// URIs to\n // behave like it would if they were running a local HTTP server. See\n // https://bugzilla.mozilla.org/show_bug.cgi?id=885597.\n var fileUriAbsPath = aSource.replace(/^file:\\/\\//, \"\");\n if (url.scheme == \"file\"\n && this._sources.has(fileUriAbsPath)) {\n return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]\n }\n\n if ((!url.path || url.path == \"/\")\n && this._sources.has(\"/\" + aSource)) {\n return this.sourcesContent[this._sources.indexOf(\"/\" + aSource)];\n }\n }\n\n // This function is used recursively from\n // IndexedSourceMapConsumer.prototype.sourceContentFor. 
In that case, we\n // don't want to throw if we can't find the source - we just want to\n // return null, so we provide a flag to exit gracefully.\n if (nullOnMissing) {\n return null;\n }\n else {\n throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n }\n };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. The only argument is an object with\n * the following properties:\n *\n * - source: The filename of the original source.\n * - line: The line number in the original source.\n * - column: The column number in the original source.\n * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n * - line: The line number in the generated source, or null.\n * - column: The column number in the generated source, or null.\n */\nBasicSourceMapConsumer.prototype.generatedPositionFor =\n function SourceMapConsumer_generatedPositionFor(aArgs) {\n var source = util.getArg(aArgs, 'source');\n if (this.sourceRoot != null) {\n source = util.relative(this.sourceRoot, source);\n }\n if (!this._sources.has(source)) {\n return {\n line: null,\n column: null,\n lastColumn: null\n };\n }\n source = this._sources.indexOf(source);\n\n var needle = {\n source: source,\n originalLine: util.getArg(aArgs, 'line'),\n originalColumn: util.getArg(aArgs, 'column')\n };\n\n var index = this._findMapping(\n needle,\n this._originalMappings,\n \"originalLine\",\n \"originalColumn\",\n util.compareByOriginalPositions,\n util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n );\n\n if (index >= 0) {\n var mapping = this._originalMappings[index];\n\n if (mapping.source === needle.source) {\n return {\n line: util.getArg(mapping, 'generatedLine', null),\n column: util.getArg(mapping, 'generatedColumn', null),\n lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n };\n }\n }\n\n return {\n line: null,\n column: null,\n lastColumn: null\n };\n };\n\nexports.BasicSourceMapConsumer = BasicSourceMapConsumer;\n\n/**\n * An IndexedSourceMapConsumer instance represents a parsed source map which\n * we can query for information. It differs from BasicSourceMapConsumer in\n * that it takes \"indexed\" source maps (i.e. ones with a \"sections\" field) as\n * input.\n *\n * The only parameter is a raw source map (either as a JSON string, or already\n * parsed to an object). According to the spec for indexed source maps, they\n * have the following attributes:\n *\n * - version: Which version of the source map spec this map is following.\n * - file: Optional. The generated file this source map is associated with.\n * - sections: A list of section definitions.\n *\n * Each value under the \"sections\" field has two fields:\n * - offset: The offset into the original specified at which this section\n * begins to apply, defined as an object with a \"line\" and \"column\"\n * field.\n * - map: A source map definition. 
This source map could also be indexed,\n * but doesn't have to be.\n *\n * Instead of the \"map\" field, it's also possible to have a \"url\" field\n * specifying a URL to retrieve a source map from, but that's currently\n * unsupported.\n *\n * Here's an example source map, taken from the source map spec[0], but\n * modified to omit a section which uses the \"url\" field.\n *\n * {\n * version : 3,\n * file: \"app.js\",\n * sections: [{\n * offset: {line:100, column:10},\n * map: {\n * version : 3,\n * file: \"section.js\",\n * sources: [\"foo.js\", \"bar.js\"],\n * names: [\"src\", \"maps\", \"are\", \"fun\"],\n * mappings: \"AAAA,E;;ABCDE;\"\n * }\n * }],\n * }\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt\n */\nfunction IndexedSourceMapConsumer(aSourceMap) {\n var sourceMap = aSourceMap;\n if (typeof aSourceMap === 'string') {\n sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n }\n\n var version = util.getArg(sourceMap, 'version');\n var sections = util.getArg(sourceMap, 'sections');\n\n if (version != this._version) {\n throw new Error('Unsupported version: ' + version);\n }\n\n this._sources = new ArraySet();\n this._names = new ArraySet();\n\n var lastOffset = {\n line: -1,\n column: 0\n };\n this._sections = sections.map(function (s) {\n if (s.url) {\n // The url field will require support for asynchronicity.\n // See https://github.com/mozilla/source-map/issues/16\n throw new Error('Support for url field in sections not implemented.');\n }\n var offset = util.getArg(s, 'offset');\n var offsetLine = util.getArg(offset, 'line');\n var offsetColumn = util.getArg(offset, 'column');\n\n if (offsetLine < lastOffset.line ||\n (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {\n throw new Error('Section offsets must be ordered and non-overlapping.');\n }\n lastOffset = offset;\n\n return {\n generatedOffset: {\n // The offset fields are 0-based, but we use 1-based indices when\n // encoding/decoding from VLQ.\n generatedLine: offsetLine + 1,\n generatedColumn: offsetColumn + 1\n },\n consumer: new SourceMapConsumer(util.getArg(s, 'map'))\n }\n });\n}\n\nIndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nIndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nIndexedSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {\n get: function () {\n var sources = [];\n for (var i = 0; i < this._sections.length; i++) {\n for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {\n sources.push(this._sections[i].consumer.sources[j]);\n }\n }\n return sources;\n }\n});\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. 
The only argument is an object\n * with the following properties:\n *\n * - line: The line number in the generated source.\n * - column: The column number in the generated source.\n *\n * and an object is returned with the following properties:\n *\n * - source: The original source file, or null.\n * - line: The line number in the original source, or null.\n * - column: The column number in the original source, or null.\n * - name: The original identifier, or null.\n */\nIndexedSourceMapConsumer.prototype.originalPositionFor =\n function IndexedSourceMapConsumer_originalPositionFor(aArgs) {\n var needle = {\n generatedLine: util.getArg(aArgs, 'line'),\n generatedColumn: util.getArg(aArgs, 'column')\n };\n\n // Find the section containing the generated position we're trying to map\n // to an original position.\n var sectionIndex = binarySearch.search(needle, this._sections,\n function(needle, section) {\n var cmp = needle.generatedLine - section.generatedOffset.generatedLine;\n if (cmp) {\n return cmp;\n }\n\n return (needle.generatedColumn -\n section.generatedOffset.generatedColumn);\n });\n var section = this._sections[sectionIndex];\n\n if (!section) {\n return {\n source: null,\n line: null,\n column: null,\n name: null\n };\n }\n\n return section.consumer.originalPositionFor({\n line: needle.generatedLine -\n (section.generatedOffset.generatedLine - 1),\n column: needle.generatedColumn -\n (section.generatedOffset.generatedLine === needle.generatedLine\n ? section.generatedOffset.generatedColumn - 1\n : 0),\n bias: aArgs.bias\n });\n };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nIndexedSourceMapConsumer.prototype.hasContentsOfAllSources =\n function IndexedSourceMapConsumer_hasContentsOfAllSources() {\n return this._sections.every(function (s) {\n return s.consumer.hasContentsOfAllSources();\n });\n };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nIndexedSourceMapConsumer.prototype.sourceContentFor =\n function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n for (var i = 0; i < this._sections.length; i++) {\n var section = this._sections[i];\n\n var content = section.consumer.sourceContentFor(aSource, true);\n if (content) {\n return content;\n }\n }\n if (nullOnMissing) {\n return null;\n }\n else {\n throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n }\n };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. 
The only argument is an object with\n * the following properties:\n *\n * - source: The filename of the original source.\n * - line: The line number in the original source.\n * - column: The column number in the original source.\n *\n * and an object is returned with the following properties:\n *\n * - line: The line number in the generated source, or null.\n * - column: The column number in the generated source, or null.\n */\nIndexedSourceMapConsumer.prototype.generatedPositionFor =\n function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {\n for (var i = 0; i < this._sections.length; i++) {\n var section = this._sections[i];\n\n // Only consider this section if the requested source is in the list of\n // sources of the consumer.\n if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) {\n continue;\n }\n var generatedPosition = section.consumer.generatedPositionFor(aArgs);\n if (generatedPosition) {\n var ret = {\n line: generatedPosition.line +\n (section.generatedOffset.generatedLine - 1),\n column: generatedPosition.column +\n (section.generatedOffset.generatedLine === generatedPosition.line\n ? section.generatedOffset.generatedColumn - 1\n : 0)\n };\n return ret;\n }\n }\n\n return {\n line: null,\n column: null\n };\n };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nIndexedSourceMapConsumer.prototype._parseMappings =\n function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n this.__generatedMappings = [];\n this.__originalMappings = [];\n for (var i = 0; i < this._sections.length; i++) {\n var section = this._sections[i];\n var sectionMappings = section.consumer._generatedMappings;\n for (var j = 0; j < sectionMappings.length; j++) {\n var mapping = sectionMappings[j];\n\n var source = section.consumer._sources.at(mapping.source);\n if (section.consumer.sourceRoot !== null) {\n source = util.join(section.consumer.sourceRoot, source);\n }\n this._sources.add(source);\n source = this._sources.indexOf(source);\n\n var name = section.consumer._names.at(mapping.name);\n this._names.add(name);\n name = this._names.indexOf(name);\n\n // The mappings coming from the consumer for the section have\n // generated positions relative to the start of the section, so we\n // need to offset them to be relative to the start of the concatenated\n // generated file.\n var adjustedMapping = {\n source: source,\n generatedLine: mapping.generatedLine +\n (section.generatedOffset.generatedLine - 1),\n generatedColumn: mapping.generatedColumn +\n (section.generatedOffset.generatedLine === mapping.generatedLine\n ? section.generatedOffset.generatedColumn - 1\n : 0),\n originalLine: mapping.originalLine,\n originalColumn: mapping.originalColumn,\n name: name\n };\n\n this.__generatedMappings.push(adjustedMapping);\n if (typeof adjustedMapping.originalLine === 'number') {\n this.__originalMappings.push(adjustedMapping);\n }\n }\n }\n\n quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated);\n quickSort(this.__originalMappings, util.compareByOriginalPositions);\n };\n\nexports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-map-consumer.js\n// module id = 7\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. 
See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nexports.GREATEST_LOWER_BOUND = 1;\nexports.LEAST_UPPER_BOUND = 2;\n\n/**\n * Recursive implementation of binary search.\n *\n * @param aLow Indices here and lower do not contain the needle.\n * @param aHigh Indices here and higher do not contain the needle.\n * @param aNeedle The element being searched for.\n * @param aHaystack The non-empty array being searched.\n * @param aCompare Function which takes two elements and returns -1, 0, or 1.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n */\nfunction recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {\n // This function terminates when one of the following is true:\n //\n // 1. We find the exact element we are looking for.\n //\n // 2. We did not find the exact element, but we can return the index of\n // the next-closest element.\n //\n // 3. We did not find the exact element, and there is no next-closest\n // element than the one we are searching for, so we return -1.\n var mid = Math.floor((aHigh - aLow) / 2) + aLow;\n var cmp = aCompare(aNeedle, aHaystack[mid], true);\n if (cmp === 0) {\n // Found the element we are looking for.\n return mid;\n }\n else if (cmp > 0) {\n // Our needle is greater than aHaystack[mid].\n if (aHigh - mid > 1) {\n // The element is in the upper half.\n return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);\n }\n\n // The exact needle element was not found in this haystack. Determine if\n // we are in termination case (3) or (2) and return the appropriate thing.\n if (aBias == exports.LEAST_UPPER_BOUND) {\n return aHigh < aHaystack.length ? aHigh : -1;\n } else {\n return mid;\n }\n }\n else {\n // Our needle is less than aHaystack[mid].\n if (mid - aLow > 1) {\n // The element is in the lower half.\n return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);\n }\n\n // we are in termination case (3) or (2) and return the appropriate thing.\n if (aBias == exports.LEAST_UPPER_BOUND) {\n return mid;\n } else {\n return aLow < 0 ? -1 : aLow;\n }\n }\n}\n\n/**\n * This is an implementation of binary search which will always try and return\n * the index of the closest element if there is no exact hit. This is because\n * mappings between original and generated line/col pairs are single points,\n * and there is an implicit region between each of them, so a miss just means\n * that you aren't on the very start of a region.\n *\n * @param aNeedle The element you are looking for.\n * @param aHaystack The array that is being searched.\n * @param aCompare A function which takes the needle and an element in the\n * array and returns -1, 0, or 1 depending on whether the needle is less\n * than, equal to, or greater than the element, respectively.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n * 'binarySearch.LEAST_UPPER_BOUND'. 
Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.\n */\nexports.search = function search(aNeedle, aHaystack, aCompare, aBias) {\n if (aHaystack.length === 0) {\n return -1;\n }\n\n var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,\n aCompare, aBias || exports.GREATEST_LOWER_BOUND);\n if (index < 0) {\n return -1;\n }\n\n // We have found either the exact element, or the next-closest element than\n // the one we are searching for. However, there may be more than one such\n // element. Make sure we always return the smallest of these.\n while (index - 1 >= 0) {\n if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {\n break;\n }\n --index;\n }\n\n return index;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/binary-search.js\n// module id = 8\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n// It turns out that some (most?) JavaScript engines don't self-host\n// `Array.prototype.sort`. This makes sense because C++ will likely remain\n// faster than JS when doing raw CPU-intensive sorting. However, when using a\n// custom comparator function, calling back and forth between the VM's C++ and\n// JIT'd JS is rather slow *and* loses JIT type information, resulting in\n// worse generated code for the comparator function than would be optimal. In\n// fact, when sorting with a comparator, these costs outweigh the benefits of\n// sorting in C++. By using our own JS-implemented Quick Sort (below), we get\n// a ~3500ms mean speed-up in `bench/bench.html`.\n\n/**\n * Swap the elements indexed by `x` and `y` in the array `ary`.\n *\n * @param {Array} ary\n * The array.\n * @param {Number} x\n * The index of the first item.\n * @param {Number} y\n * The index of the second item.\n */\nfunction swap(ary, x, y) {\n var temp = ary[x];\n ary[x] = ary[y];\n ary[y] = temp;\n}\n\n/**\n * Returns a random integer within the range `low .. high` inclusive.\n *\n * @param {Number} low\n * The lower bound on the range.\n * @param {Number} high\n * The upper bound on the range.\n */\nfunction randomIntInRange(low, high) {\n return Math.round(low + (Math.random() * (high - low)));\n}\n\n/**\n * The Quick Sort algorithm.\n *\n * @param {Array} ary\n * An array to sort.\n * @param {function} comparator\n * Function to use to compare two items.\n * @param {Number} p\n * Start index of the array\n * @param {Number} r\n * End index of the array\n */\nfunction doQuickSort(ary, comparator, p, r) {\n // If our lower bound is less than our upper bound, we (1) partition the\n // array into two pieces and (2) recurse on each half. If it is not, this is\n // the empty array and our base case.\n\n if (p < r) {\n // (1) Partitioning.\n //\n // The partitioning chooses a pivot between `p` and `r` and moves all\n // elements that are less than or equal to the pivot to the before it, and\n // all the elements that are greater than it after it. The effect is that\n // once partition is done, the pivot is in the exact place it will be when\n // the array is put in sorted order, and it will not need to be moved\n // again. 
This runs in O(n) time.\n\n // Always choose a random pivot so that an input array which is reverse\n // sorted does not cause O(n^2) running time.\n var pivotIndex = randomIntInRange(p, r);\n var i = p - 1;\n\n swap(ary, pivotIndex, r);\n var pivot = ary[r];\n\n // Immediately after `j` is incremented in this loop, the following hold\n // true:\n //\n // * Every element in `ary[p .. i]` is less than or equal to the pivot.\n //\n // * Every element in `ary[i+1 .. j-1]` is greater than the pivot.\n for (var j = p; j < r; j++) {\n if (comparator(ary[j], pivot) <= 0) {\n i += 1;\n swap(ary, i, j);\n }\n }\n\n swap(ary, i + 1, j);\n var q = i + 1;\n\n // (2) Recurse on each half.\n\n doQuickSort(ary, comparator, p, q - 1);\n doQuickSort(ary, comparator, q + 1, r);\n }\n}\n\n/**\n * Sort the given array in-place with the given comparator function.\n *\n * @param {Array} ary\n * An array to sort.\n * @param {function} comparator\n * Function to use to compare two items.\n */\nexports.quickSort = function (ary, comparator) {\n doQuickSort(ary, comparator, 0, ary.length - 1);\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/quick-sort.js\n// module id = 9\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;\nvar util = require('./util');\n\n// Matches a Windows-style `\\r\\n` newline or a `\\n` newline used by all other\n// operating systems these days (capturing the result).\nvar REGEX_NEWLINE = /(\\r?\\n)/;\n\n// Newline character code for charCodeAt() comparisons\nvar NEWLINE_CODE = 10;\n\n// Private symbol for identifying `SourceNode`s when multiple versions of\n// the source-map library are loaded. This MUST NOT CHANGE across\n// versions!\nvar isSourceNode = \"$$$isSourceNode$$$\";\n\n/**\n * SourceNodes provide a way to abstract over interpolating/concatenating\n * snippets of generated JavaScript source code while maintaining the line and\n * column information associated with the original source code.\n *\n * @param aLine The original line number.\n * @param aColumn The original column number.\n * @param aSource The original source's filename.\n * @param aChunks Optional. An array of strings which are snippets of\n * generated JS, or other SourceNodes.\n * @param aName The original identifier.\n */\nfunction SourceNode(aLine, aColumn, aSource, aChunks, aName) {\n this.children = [];\n this.sourceContents = {};\n this.line = aLine == null ? null : aLine;\n this.column = aColumn == null ? null : aColumn;\n this.source = aSource == null ? null : aSource;\n this.name = aName == null ? null : aName;\n this[isSourceNode] = true;\n if (aChunks != null) this.add(aChunks);\n}\n\n/**\n * Creates a SourceNode from generated code and a SourceMapConsumer.\n *\n * @param aGeneratedCode The generated code\n * @param aSourceMapConsumer The SourceMap for the generated code\n * @param aRelativePath Optional. 
The path that relative sources in the\n * SourceMapConsumer should be relative to.\n */\nSourceNode.fromStringWithSourceMap =\n function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {\n // The SourceNode we want to fill with the generated code\n // and the SourceMap\n var node = new SourceNode();\n\n // All even indices of this array are one line of the generated code,\n // while all odd indices are the newlines between two adjacent lines\n // (since `REGEX_NEWLINE` captures its match).\n // Processed fragments are accessed by calling `shiftNextLine`.\n var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);\n var remainingLinesIndex = 0;\n var shiftNextLine = function() {\n var lineContents = getNextLine();\n // The last line of a file might not have a newline.\n var newLine = getNextLine() || \"\";\n return lineContents + newLine;\n\n function getNextLine() {\n return remainingLinesIndex < remainingLines.length ?\n remainingLines[remainingLinesIndex++] : undefined;\n }\n };\n\n // We need to remember the position of \"remainingLines\"\n var lastGeneratedLine = 1, lastGeneratedColumn = 0;\n\n // The generate SourceNodes we need a code range.\n // To extract it current and last mapping is used.\n // Here we store the last mapping.\n var lastMapping = null;\n\n aSourceMapConsumer.eachMapping(function (mapping) {\n if (lastMapping !== null) {\n // We add the code from \"lastMapping\" to \"mapping\":\n // First check if there is a new line in between.\n if (lastGeneratedLine < mapping.generatedLine) {\n // Associate first line with \"lastMapping\"\n addMappingWithCode(lastMapping, shiftNextLine());\n lastGeneratedLine++;\n lastGeneratedColumn = 0;\n // The remaining code is added without mapping\n } else {\n // There is no new line in between.\n // Associate the code between \"lastGeneratedColumn\" and\n // \"mapping.generatedColumn\" with \"lastMapping\"\n var nextLine = remainingLines[remainingLinesIndex];\n var code = nextLine.substr(0, mapping.generatedColumn -\n lastGeneratedColumn);\n remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn -\n lastGeneratedColumn);\n lastGeneratedColumn = mapping.generatedColumn;\n addMappingWithCode(lastMapping, code);\n // No more remaining code, continue\n lastMapping = mapping;\n return;\n }\n }\n // We add the generated code until the first mapping\n // to the SourceNode without any mapping.\n // Each line is added as separate string.\n while (lastGeneratedLine < mapping.generatedLine) {\n node.add(shiftNextLine());\n lastGeneratedLine++;\n }\n if (lastGeneratedColumn < mapping.generatedColumn) {\n var nextLine = remainingLines[remainingLinesIndex];\n node.add(nextLine.substr(0, mapping.generatedColumn));\n remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn);\n lastGeneratedColumn = mapping.generatedColumn;\n }\n lastMapping = mapping;\n }, this);\n // We have processed all mappings.\n if (remainingLinesIndex < remainingLines.length) {\n if (lastMapping) {\n // Associate the remaining code in the current line with \"lastMapping\"\n addMappingWithCode(lastMapping, shiftNextLine());\n }\n // and add the remaining lines without any mapping\n node.add(remainingLines.splice(remainingLinesIndex).join(\"\"));\n }\n\n // Copy sourcesContent into SourceNode\n aSourceMapConsumer.sources.forEach(function (sourceFile) {\n var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n if (content != null) {\n if (aRelativePath != null) {\n sourceFile = 
util.join(aRelativePath, sourceFile);\n }\n node.setSourceContent(sourceFile, content);\n }\n });\n\n return node;\n\n function addMappingWithCode(mapping, code) {\n if (mapping === null || mapping.source === undefined) {\n node.add(code);\n } else {\n var source = aRelativePath\n ? util.join(aRelativePath, mapping.source)\n : mapping.source;\n node.add(new SourceNode(mapping.originalLine,\n mapping.originalColumn,\n source,\n code,\n mapping.name));\n }\n }\n };\n\n/**\n * Add a chunk of generated JS to this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n * SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.add = function SourceNode_add(aChunk) {\n if (Array.isArray(aChunk)) {\n aChunk.forEach(function (chunk) {\n this.add(chunk);\n }, this);\n }\n else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n if (aChunk) {\n this.children.push(aChunk);\n }\n }\n else {\n throw new TypeError(\n \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n );\n }\n return this;\n};\n\n/**\n * Add a chunk of generated JS to the beginning of this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n * SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {\n if (Array.isArray(aChunk)) {\n for (var i = aChunk.length-1; i >= 0; i--) {\n this.prepend(aChunk[i]);\n }\n }\n else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n this.children.unshift(aChunk);\n }\n else {\n throw new TypeError(\n \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n );\n }\n return this;\n};\n\n/**\n * Walk over the tree of JS snippets in this node and its children. The\n * walking function is called once for each snippet of JS and is passed that\n * snippet and the its original associated source's line/column location.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walk = function SourceNode_walk(aFn) {\n var chunk;\n for (var i = 0, len = this.children.length; i < len; i++) {\n chunk = this.children[i];\n if (chunk[isSourceNode]) {\n chunk.walk(aFn);\n }\n else {\n if (chunk !== '') {\n aFn(chunk, { source: this.source,\n line: this.line,\n column: this.column,\n name: this.name });\n }\n }\n }\n};\n\n/**\n * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between\n * each of `this.children`.\n *\n * @param aSep The separator.\n */\nSourceNode.prototype.join = function SourceNode_join(aSep) {\n var newChildren;\n var i;\n var len = this.children.length;\n if (len > 0) {\n newChildren = [];\n for (i = 0; i < len-1; i++) {\n newChildren.push(this.children[i]);\n newChildren.push(aSep);\n }\n newChildren.push(this.children[i]);\n this.children = newChildren;\n }\n return this;\n};\n\n/**\n * Call String.prototype.replace on the very right-most source snippet. 
Useful\n * for trimming whitespace from the end of a source node, etc.\n *\n * @param aPattern The pattern to replace.\n * @param aReplacement The thing to replace the pattern with.\n */\nSourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {\n var lastChild = this.children[this.children.length - 1];\n if (lastChild[isSourceNode]) {\n lastChild.replaceRight(aPattern, aReplacement);\n }\n else if (typeof lastChild === 'string') {\n this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);\n }\n else {\n this.children.push(''.replace(aPattern, aReplacement));\n }\n return this;\n};\n\n/**\n * Set the source content for a source file. This will be added to the SourceMapGenerator\n * in the sourcesContent field.\n *\n * @param aSourceFile The filename of the source file\n * @param aSourceContent The content of the source file\n */\nSourceNode.prototype.setSourceContent =\n function SourceNode_setSourceContent(aSourceFile, aSourceContent) {\n this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;\n };\n\n/**\n * Walk over the tree of SourceNodes. The walking function is called for each\n * source file content and is passed the filename and source content.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walkSourceContents =\n function SourceNode_walkSourceContents(aFn) {\n for (var i = 0, len = this.children.length; i < len; i++) {\n if (this.children[i][isSourceNode]) {\n this.children[i].walkSourceContents(aFn);\n }\n }\n\n var sources = Object.keys(this.sourceContents);\n for (var i = 0, len = sources.length; i < len; i++) {\n aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);\n }\n };\n\n/**\n * Return the string representation of this source node. 
Walks over the tree\n * and concatenates all the various snippets together to one string.\n */\nSourceNode.prototype.toString = function SourceNode_toString() {\n var str = \"\";\n this.walk(function (chunk) {\n str += chunk;\n });\n return str;\n};\n\n/**\n * Returns the string representation of this source node along with a source\n * map.\n */\nSourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {\n var generated = {\n code: \"\",\n line: 1,\n column: 0\n };\n var map = new SourceMapGenerator(aArgs);\n var sourceMappingActive = false;\n var lastOriginalSource = null;\n var lastOriginalLine = null;\n var lastOriginalColumn = null;\n var lastOriginalName = null;\n this.walk(function (chunk, original) {\n generated.code += chunk;\n if (original.source !== null\n && original.line !== null\n && original.column !== null) {\n if(lastOriginalSource !== original.source\n || lastOriginalLine !== original.line\n || lastOriginalColumn !== original.column\n || lastOriginalName !== original.name) {\n map.addMapping({\n source: original.source,\n original: {\n line: original.line,\n column: original.column\n },\n generated: {\n line: generated.line,\n column: generated.column\n },\n name: original.name\n });\n }\n lastOriginalSource = original.source;\n lastOriginalLine = original.line;\n lastOriginalColumn = original.column;\n lastOriginalName = original.name;\n sourceMappingActive = true;\n } else if (sourceMappingActive) {\n map.addMapping({\n generated: {\n line: generated.line,\n column: generated.column\n }\n });\n lastOriginalSource = null;\n sourceMappingActive = false;\n }\n for (var idx = 0, length = chunk.length; idx < length; idx++) {\n if (chunk.charCodeAt(idx) === NEWLINE_CODE) {\n generated.line++;\n generated.column = 0;\n // Mappings end at eol\n if (idx + 1 === length) {\n lastOriginalSource = null;\n sourceMappingActive = false;\n } else if (sourceMappingActive) {\n map.addMapping({\n source: original.source,\n original: {\n line: original.line,\n column: original.column\n },\n generated: {\n line: generated.line,\n column: generated.column\n },\n name: original.name\n });\n }\n } else {\n generated.column++;\n }\n }\n });\n this.walkSourceContents(function (sourceFile, sourceContent) {\n map.setSourceContent(sourceFile, sourceContent);\n });\n\n return { code: generated.code, map: map };\n};\n\nexports.SourceNode = SourceNode;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-node.js\n// module id = 10\n// module chunks = 0"],"sourceRoot":""} \ No newline at end of file diff --git a/node_modules/source-map/lib/array-set.js b/node_modules/source-map/lib/array-set.js new file mode 100644 index 00000000..fbd5c81c --- /dev/null +++ b/node_modules/source-map/lib/array-set.js @@ -0,0 +1,121 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); +var has = Object.prototype.hasOwnProperty; +var hasNativeMap = typeof Map !== "undefined"; + +/** + * A data structure which is a combination of an array and a set. Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ +function ArraySet() { + this._array = []; + this._set = hasNativeMap ? 
new Map() : Object.create(null); +} + +/** + * Static method for creating ArraySet instances from an existing array. + */ +ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; +}; + +/** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ +ArraySet.prototype.size = function ArraySet_size() { + return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length; +}; + +/** + * Add the given string to this set. + * + * @param String aStr + */ +ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = hasNativeMap ? aStr : util.toSetString(aStr); + var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + if (hasNativeMap) { + this._set.set(aStr, idx); + } else { + this._set[sStr] = idx; + } + } +}; + +/** + * Is the given string a member of this set? + * + * @param String aStr + */ +ArraySet.prototype.has = function ArraySet_has(aStr) { + if (hasNativeMap) { + return this._set.has(aStr); + } else { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); + } +}; + +/** + * What is the index of the given string in the array? + * + * @param String aStr + */ +ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + if (hasNativeMap) { + var idx = this._set.get(aStr); + if (idx >= 0) { + return idx; + } + } else { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + } + + throw new Error('"' + aStr + '" is not in the set.'); +}; + +/** + * What is the element at the given index? + * + * @param Number aIdx + */ +ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); +}; + +/** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ +ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); +}; + +exports.ArraySet = ArraySet; diff --git a/node_modules/source-map/lib/base64-vlq.js b/node_modules/source-map/lib/base64-vlq.js new file mode 100644 index 00000000..612b4040 --- /dev/null +++ b/node_modules/source-map/lib/base64-vlq.js @@ -0,0 +1,140 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +var base64 = require('./base64'); + +// A single base 64 digit can contain 6 bits of data. For the base 64 variable +// length quantities we use in the source map spec, the first bit is the sign, +// the next four bits are the actual value, and the 6th bit is the +// continuation bit. The continuation bit tells us whether there are more +// digits in this value following this digit. +// +// Continuation +// | Sign +// | | +// V V +// 101011 + +var VLQ_BASE_SHIFT = 5; + +// binary: 100000 +var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + +// binary: 011111 +var VLQ_BASE_MASK = VLQ_BASE - 1; + +// binary: 100000 +var VLQ_CONTINUATION_BIT = VLQ_BASE; + +/** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ +function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; +} + +/** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ +function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; +} + +/** + * Returns the base 64 VLQ encoded value. + */ +exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. + digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; +}; + +/** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. 
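// A minimal usage sketch of the Base 64 VLQ coder defined in this hunk
// (illustrative values only; assumes it is loaded as './base64-vlq' from a
// sibling module in lib/). The sign lives in the least significant bit, so
// small magnitudes stay single-digit while 16 needs a continuation digit.
var base64VLQ = require('./base64-vlq');
base64VLQ.encode(0);    // "A"
base64VLQ.encode(1);    // "C"  (1 -> 10 binary)
base64VLQ.encode(-1);   // "D"  (-1 -> 11 binary)
base64VLQ.encode(16);   // "gB" (two digits: continuation bit set on the first)
// decode() reports its results through the out parameter:
var out = {};
base64VLQ.decode('gB', 0, out);
// out.value === 16, out.rest === 2 (index just past the consumed digits)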
+ */ +exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; +}; diff --git a/node_modules/source-map/lib/base64.js b/node_modules/source-map/lib/base64.js new file mode 100644 index 00000000..8aa86b30 --- /dev/null +++ b/node_modules/source-map/lib/base64.js @@ -0,0 +1,67 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + +/** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ +exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); +}; + +/** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. + */ +exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. + return -1; +}; diff --git a/node_modules/source-map/lib/binary-search.js b/node_modules/source-map/lib/binary-search.js new file mode 100644 index 00000000..010ac941 --- /dev/null +++ b/node_modules/source-map/lib/binary-search.js @@ -0,0 +1,111 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +exports.GREATEST_LOWER_BOUND = 1; +exports.LEAST_UPPER_BOUND = 2; + +/** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. 
Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ +function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. + return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } +} + +/** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. + * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ +exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. 
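// A small sketch of how exports.search behaves with each bias, using plain
// numbers and an assumed numeric comparator (the library itself passes in
// mapping comparators from ./util).
var binarySearch = require('./binary-search');
var haystack = [1, 3, 5, 7];
var cmp = function (a, b) { return a - b; };
binarySearch.search(5, haystack, cmp);                                  // 2  (exact hit)
binarySearch.search(4, haystack, cmp);                                  // 1  (GREATEST_LOWER_BOUND: element 3)
binarySearch.search(4, haystack, cmp, binarySearch.LEAST_UPPER_BOUND);  // 2  (element 5)
binarySearch.search(0, haystack, cmp);                                  // -1 (nothing smaller exists)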
+ while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; +}; diff --git a/node_modules/source-map/lib/mapping-list.js b/node_modules/source-map/lib/mapping-list.js new file mode 100644 index 00000000..06d1274a --- /dev/null +++ b/node_modules/source-map/lib/mapping-list.js @@ -0,0 +1,79 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); + +/** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ +function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; +} + +/** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ +function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; +} + +/** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ +MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + +/** + * Add the given source mapping. + * + * @param Object aMapping + */ +MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } +}; + +/** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. + */ +MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; +}; + +exports.MappingList = MappingList; diff --git a/node_modules/source-map/lib/quick-sort.js b/node_modules/source-map/lib/quick-sort.js new file mode 100644 index 00000000..6a7caadb --- /dev/null +++ b/node_modules/source-map/lib/quick-sort.js @@ -0,0 +1,114 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +// It turns out that some (most?) JavaScript engines don't self-host +// `Array.prototype.sort`. This makes sense because C++ will likely remain +// faster than JS when doing raw CPU-intensive sorting. 
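// A short sketch of the MappingList added in the previous hunk: in-order
// adds take the fast path, and one out-of-order add defers a sort until
// toArray(). (Field values are made up for illustration.)
var MappingList = require('./mapping-list').MappingList;
var list = new MappingList();
list.add({ generatedLine: 1, generatedColumn: 0 });
list.add({ generatedLine: 2, generatedColumn: 4 });
list.add({ generatedLine: 1, generatedColumn: 7 });  // out of order: flips _sorted to false
list.toArray().map(function (m) {
  return m.generatedLine + ':' + m.generatedColumn;
});
// -> ['1:0', '1:7', '2:4']  (lazily re-sorted by generated position)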
However, when using a +// custom comparator function, calling back and forth between the VM's C++ and +// JIT'd JS is rather slow *and* loses JIT type information, resulting in +// worse generated code for the comparator function than would be optimal. In +// fact, when sorting with a comparator, these costs outweigh the benefits of +// sorting in C++. By using our own JS-implemented Quick Sort (below), we get +// a ~3500ms mean speed-up in `bench/bench.html`. + +/** + * Swap the elements indexed by `x` and `y` in the array `ary`. + * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. + */ +function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; +} + +/** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ +function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); +} + +/** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ +function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. + // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. + for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } +} + +/** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + */ +exports.quickSort = function (ary, comparator) { + doQuickSort(ary, comparator, 0, ary.length - 1); +}; diff --git a/node_modules/source-map/lib/source-map-consumer.js b/node_modules/source-map/lib/source-map-consumer.js new file mode 100644 index 00000000..6abcc280 --- /dev/null +++ b/node_modules/source-map/lib/source-map-consumer.js @@ -0,0 +1,1082 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
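// A quick sketch of the in-place quickSort exported by the hunk above, with
// an assumed comparator over plain objects.
var quickSort = require('./quick-sort').quickSort;
var items = [{ v: 3 }, { v: 1 }, { v: 2 }];
quickSort(items, function (a, b) { return a.v - b.v; });
// items is now sorted in place: v = 1, 2, 3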
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); +var binarySearch = require('./binary-search'); +var ArraySet = require('./array-set').ArraySet; +var base64VLQ = require('./base64-vlq'); +var quickSort = require('./quick-sort').quickSort; + +function SourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + return sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap) + : new BasicSourceMapConsumer(sourceMap); +} + +SourceMapConsumer.fromSourceMap = function(aSourceMap) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap); +} + +/** + * The version of the source mapping spec that we are consuming. + */ +SourceMapConsumer.prototype._version = 3; + +// `__generatedMappings` and `__originalMappings` are arrays that hold the +// parsed mapping coordinates from the source map's "mappings" attribute. They +// are lazily instantiated, accessed via the `_generatedMappings` and +// `_originalMappings` getters respectively, and we only parse the mappings +// and create these arrays once queried for a source location. We jump through +// these hoops because there can be many thousands of mappings, and parsing +// them is expensive, so we only want to do it if we must. +// +// Each object in the arrays is of the form: +// +// { +// generatedLine: The line number in the generated code, +// generatedColumn: The column number in the generated code, +// source: The path to the original source file that generated this +// chunk of code, +// originalLine: The line number in the original source that +// corresponds to this chunk of generated code, +// originalColumn: The column number in the original source that +// corresponds to this chunk of generated code, +// name: The name of the original symbol which generated this chunk of +// code. +// } +// +// All properties except for `generatedLine` and `generatedColumn` can be +// `null`. +// +// `_generatedMappings` is ordered by the generated positions. +// +// `_originalMappings` is ordered by the original positions. + +SourceMapConsumer.prototype.__generatedMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } +}); + +SourceMapConsumer.prototype.__originalMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } +}); + +SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
+ */ +SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + +SourceMapConsumer.GENERATED_ORDER = 1; +SourceMapConsumer.ORIGINAL_ORDER = 2; + +SourceMapConsumer.GREATEST_LOWER_BOUND = 1; +SourceMapConsumer.LEAST_UPPER_BOUND = 2; + +/** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. + */ +SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + mappings.map(function (mapping) { + var source = mapping.source === null ? null : this._sources.at(mapping.source); + if (source != null && sourceRoot != null) { + source = util.join(sourceRoot, source); + } + return { + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : this._names.at(mapping.name) + }; + }, this).forEach(aCallback, context); + }; + +/** + * Returns all generated line and column information for the original source, + * line, and column provided. If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: Optional. the column number in the original source. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ +SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. 
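// A compact consumer sketch against the API shown in this file; the raw map
// below is assumed and minimal (one source, two mappings on generated line 1
// at columns 0 and 1, both pointing into one.js).
var SourceMapConsumer = require('./source-map-consumer').SourceMapConsumer;
var consumer = new SourceMapConsumer({
  version: 3,
  file: 'min.js',
  sources: ['one.js'],
  names: [],
  mappings: 'AAAA,CAAC'
});
consumer.eachMapping(function (m) {
  // logs: 1 0 one.js 1 0, then 1 1 one.js 1 1
  console.log(m.generatedLine, m.generatedColumn, m.source, m.originalLine, m.originalColumn);
});
consumer.allGeneratedPositionsFor({ source: 'one.js', line: 1 });
// -> [ { line: 1, column: 0, lastColumn: null },
//      { line: 1, column: 1, lastColumn: null } ]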
+ var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + if (this.sourceRoot != null) { + needle.source = util.relative(this.sourceRoot, needle.source); + } + if (!this._sources.has(needle.source)) { + return []; + } + needle.source = this._sources.indexOf(needle.source); + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. + while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + +exports.SourceMapConsumer = SourceMapConsumer; + +/** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The only parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. 
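// The constructors also accept the map as a JSON string; a leading ")]}'"
// XSSI guard is stripped before parsing (sketch; map contents assumed).
var SourceMapConsumer = require('./source-map-consumer').SourceMapConsumer;
var json = JSON.stringify({ version: 3, sources: ['one.js'], names: [], mappings: 'AAAA' });
new SourceMapConsumer(json);           // parsed for you
new SourceMapConsumer(")]}'" + json);  // XSSI prefix removed, then parsed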
+ * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ +function BasicSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this.file = file; +} + +BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + +/** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. 
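// A sketch of BasicSourceMapConsumer.fromSourceMap: build a generator (from
// ./source-map-generator, added later in this patch), add one assumed
// mapping, then consume it without a JSON round trip.
var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;
var SourceMapConsumer = require('./source-map-consumer').SourceMapConsumer;
var generator = new SourceMapGenerator({ file: 'min.js' });
generator.addMapping({
  generated: { line: 1, column: 0 },
  original: { line: 1, column: 0 },
  source: 'one.js'
});
var consumer = SourceMapConsumer.fromSourceMap(generator);
consumer.originalPositionFor({ line: 1, column: 0 });
// -> { source: 'one.js', line: 1, column: 0, name: null }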
+ * @returns BasicSourceMapConsumer + */ +BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + +/** + * The version of the source mapping spec that we are consuming. + */ +BasicSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._sources.toArray().map(function (s) { + return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s; + }, this); + } +}); + +/** + * Provide the JIT with a nice shape / hidden class. + */ +function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; +} + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ +BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + // Because each offset is encoded relative to the previous one, + // many segments often have the same encoding. 
We can exploit this + // fact by caching the parsed variable length fields of each segment, + // allowing us to avoid a second parse if we encounter the same + // segment again. + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = cachedSegments[str]; + if (segment) { + index += str.length; + } else { + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + cachedSegments[str] = segment; + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. + mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + originalMappings.push(mapping); + } + } + } + + quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); + this.__generatedMappings = generatedMappings; + + quickSort(originalMappings, util.compareByOriginalPositions); + this.__originalMappings = originalMappings; + }; + +/** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ +BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. + + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + +/** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ +BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated columnt. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). 
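// Sketch of computeColumnSpans(): after calling it, allGeneratedPositionsFor
// reports an inclusive lastColumn for each mapping (same assumed two-mapping
// map as earlier: generated columns 0 and 1 on line 1).
var SourceMapConsumer = require('./source-map-consumer').SourceMapConsumer;
var consumer = new SourceMapConsumer({
  version: 3, sources: ['one.js'], names: [], mappings: 'AAAA,CAAC'
});
consumer.computeColumnSpans();
consumer.allGeneratedPositionsFor({ source: 'one.js', line: 1 });
// -> [ { line: 1, column: 0, lastColumn: 0 },         // ends where the next mapping starts
//      { line: 1, column: 1, lastColumn: Infinity } ] // last mapping spans the rest of the line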
+ if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. + */ +BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + if (this.sourceRoot != null) { + source = util.join(this.sourceRoot, source); + } + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + +/** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. 
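// Sketch of the sourcesContent helpers (the inline contents are assumed):
var SourceMapConsumer = require('./source-map-consumer').SourceMapConsumer;
var consumer = new SourceMapConsumer({
  version: 3, sources: ['one.js'], names: [], mappings: 'AAAA',
  sourcesContent: ['var x = 1;\n']
});
consumer.hasContentsOfAllSources();             // true
consumer.sourceContentFor('one.js');            // 'var x = 1;\n'
consumer.sourceContentFor('missing.js', true);  // null (nullOnMissing) instead of throwing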
+ */ +BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + if (this.sourceRoot != null) { + aSource = util.relative(this.sourceRoot, aSource); + } + + if (this._sources.has(aSource)) { + return this.sourcesContent[this._sources.indexOf(aSource)]; + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = aSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + aSource)) { + return this.sourcesContent[this._sources.indexOf("/" + aSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: The column number in the original source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. 
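// Sketch of generatedPositionFor and its default bias (same assumed map as
// earlier, with original columns 0 and 1 on line 1 of one.js):
var SourceMapConsumer = require('./source-map-consumer').SourceMapConsumer;
var consumer = new SourceMapConsumer({
  version: 3, sources: ['one.js'], names: [], mappings: 'AAAA,CAAC'
});
consumer.generatedPositionFor({ source: 'one.js', line: 1, column: 1 });
// -> { line: 1, column: 1, lastColumn: null }  (exact hit)
consumer.generatedPositionFor({ source: 'one.js', line: 1, column: 5 });
// -> { line: 1, column: 1, lastColumn: null }  (default GREATEST_LOWER_BOUND
//                                               snaps back to the closest earlier mapping)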
+ */ +BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + if (this.sourceRoot != null) { + source = util.relative(this.sourceRoot, source); + } + if (!this._sources.has(source)) { + return { + line: null, + column: null, + lastColumn: null + }; + } + source = this._sources.indexOf(source); + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + +exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + +/** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The only parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. + * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ +function IndexedSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. 
+ // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map')) + } + }); +} + +IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + +/** + * The version of the source mapping spec that we are consuming. + */ +IndexedSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } +}); + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. + */ +IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. + var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + +/** + * Returns the original source content. 
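// Sketch of consuming an indexed ("sections") map; offsets and sources are
// assumed. Each section wraps its own BasicSourceMapConsumer, and generated
// positions are shifted by the section offset.
var SourceMapConsumer = require('./source-map-consumer').SourceMapConsumer;
var indexed = new SourceMapConsumer({
  version: 3,
  file: 'bundle.js',
  sections: [
    { offset: { line: 0, column: 0 },
      map: { version: 3, sources: ['one.js'], names: [], mappings: 'AAAA' } },
    { offset: { line: 10, column: 0 },
      map: { version: 3, sources: ['two.js'], names: [], mappings: 'AAAA' } }
  ]
});
indexed.sources;                                     // ['one.js', 'two.js']
indexed.originalPositionFor({ line: 11, column: 5 });
// -> { source: 'two.js', line: 1, column: 0, name: null }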
The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ +IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: The column number in the original source. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ +IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ +IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + if (section.consumer.sourceRoot !== null) { + source = util.join(section.consumer.sourceRoot, source); + } + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? 
section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + +exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; diff --git a/node_modules/source-map/lib/source-map-generator.js b/node_modules/source-map/lib/source-map-generator.js new file mode 100644 index 00000000..aff1e7fb --- /dev/null +++ b/node_modules/source-map/lib/source-map-generator.js @@ -0,0 +1,416 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var base64VLQ = require('./base64-vlq'); +var util = require('./util'); +var ArraySet = require('./array-set').ArraySet; +var MappingList = require('./mapping-list').MappingList; + +/** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. + */ +function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; +} + +SourceMapGenerator.prototype._version = 3; + +/** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. + */ +SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + }); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + +/** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). 
+ * - name: An optional original token name for this mapping. + */ +SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + +/** + * Set the source content for a source file. + */ +SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. + if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. + delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + +/** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ +SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. 
+ if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. + var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + +/** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. + * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ +SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + // When aOriginal is truthy but has empty values for .line and .column, + // it is most likely a programmer error. In this case we throw a very + // specific error message to try to guide them the right way. + // For example: https://github.com/Polymer/polymer-bundler/pull/519 + if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') { + throw new Error( + 'original.line and original.column are not numbers -- you probably meant to omit ' + + 'the original mapping entirely and only map the generated position. If so, pass ' + + 'null for the original mapping instead of an object with empty or null values.' + ); + } + + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. 
+ return; + } + else { + throw new Error('Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + }; + +/** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. + */ +SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + +SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + +/** + * Externalize the source map. + */ +SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + +/** + * Render the source map being generated to a string. 
+ */ +SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + +exports.SourceMapGenerator = SourceMapGenerator; diff --git a/node_modules/source-map/lib/source-node.js b/node_modules/source-map/lib/source-node.js new file mode 100644 index 00000000..d196a53f --- /dev/null +++ b/node_modules/source-map/lib/source-node.js @@ -0,0 +1,413 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator; +var util = require('./util'); + +// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other +// operating systems these days (capturing the result). +var REGEX_NEWLINE = /(\r?\n)/; + +// Newline character code for charCodeAt() comparisons +var NEWLINE_CODE = 10; + +// Private symbol for identifying `SourceNode`s when multiple versions of +// the source-map library are loaded. This MUST NOT CHANGE across +// versions! +var isSourceNode = "$$$isSourceNode$$$"; + +/** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ +function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); +} + +/** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. + */ +SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are accessed by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var remainingLinesIndex = 0; + var shiftNextLine = function() { + var lineContents = getNextLine(); + // The last line of a file might not have a newline. + var newLine = getNextLine() || ""; + return lineContents + newLine; + + function getNextLine() { + return remainingLinesIndex < remainingLines.length ? + remainingLines[remainingLinesIndex++] : undefined; + } + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. 
+ // To extract it current and last mapping is used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. + if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. + // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[remainingLinesIndex]; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[remainingLinesIndex]; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. + if (remainingLinesIndex < remainingLines.length) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.splice(remainingLinesIndex).join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + +/** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ +SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; +}; + +/** + * Add a chunk of generated JS to the beginning of this source node. 
+ * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ +SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; +}; + +/** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ +SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } +}; + +/** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. + */ +SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; +}; + +/** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ +SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; +}; + +/** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ +SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + +/** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. 
+ */ +SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + +/** + * Return the string representation of this source node. Walks over the tree + * and concatenates all the various snippets together to one string. + */ +SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; +}; + +/** + * Returns the string representation of this source node along with a source + * map. + */ +SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; +}; + +exports.SourceNode = SourceNode; diff --git a/node_modules/source-map/lib/util.js b/node_modules/source-map/lib/util.js new file mode 100644 index 00000000..44e0e452 --- /dev/null +++ b/node_modules/source-map/lib/util.js @@ -0,0 +1,417 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +/** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. 
+ * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. + */ +function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } +} +exports.getArg = getArg; + +var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/; +var dataUrlRegexp = /^data:.+\,.+$/; + +function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; +} +exports.urlParse = urlParse; + +function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; +} +exports.urlGenerate = urlGenerate; + +/** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consecutive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ +function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + + var parts = path.split(/\/+/); + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; +} +exports.normalize = normalize; + +/** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. 
+ */ +function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; +} +exports.join = join; + +exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp); +}; + +/** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ +function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); +} +exports.relative = relative; + +var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); +}()); + +function identity (s) { + return s; +} + +/** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. + * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ +function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; +} +exports.toSetString = supportsNullProto ? identity : toSetString; + +function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; +} +exports.fromSetString = supportsNullProto ? 
identity : fromSetString; + +function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; +} + +/** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ +function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; +} +exports.compareByOriginalPositions = compareByOriginalPositions; + +/** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. + */ +function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; +} +exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + +function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; +} + +/** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. 
+ */ +function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; diff --git a/node_modules/source-map/package.json b/node_modules/source-map/package.json new file mode 100644 index 00000000..048e3ae8 --- /dev/null +++ b/node_modules/source-map/package.json @@ -0,0 +1,72 @@ +{ + "name": "source-map", + "description": "Generates and consumes source maps", + "version": "0.5.7", + "homepage": "https://github.com/mozilla/source-map", + "author": "Nick Fitzgerald ", + "contributors": [ + "Tobias Koppers ", + "Duncan Beevers ", + "Stephen Crane ", + "Ryan Seddon ", + "Miles Elam ", + "Mihai Bazon ", + "Michael Ficarra ", + "Todd Wolfson ", + "Alexander Solovyov ", + "Felix Gnass ", + "Conrad Irwin ", + "usrbincc ", + "David Glasser ", + "Chase Douglas ", + "Evan Wallace ", + "Heather Arthur ", + "Hugh Kennedy ", + "David Glasser ", + "Simon Lydell ", + "Jmeas Smith ", + "Michael Z Goddard ", + "azu ", + "John Gozde ", + "Adam Kirkton ", + "Chris Montgomery ", + "J. Ryan Stinnett ", + "Jack Herrington ", + "Chris Truter ", + "Daniel Espeset ", + "Jamie Wong ", + "Eddy Bruël ", + "Hawken Rives ", + "Gilad Peleg ", + "djchie ", + "Gary Ye ", + "Nicolas Lalevée " + ], + "repository": { + "type": "git", + "url": "http://github.com/mozilla/source-map.git" + }, + "main": "./source-map.js", + "files": [ + "source-map.js", + "lib/", + "dist/source-map.debug.js", + "dist/source-map.js", + "dist/source-map.min.js", + "dist/source-map.min.js.map" + ], + "engines": { + "node": ">=0.10.0" + }, + "license": "BSD-3-Clause", + "scripts": { + "test": "npm run build && node test/run-tests.js", + "build": "webpack --color", + "toc": "doctoc --title '## Table of Contents' README.md && doctoc --title '## Table of Contents' CONTRIBUTING.md" + }, + "devDependencies": { + "doctoc": "^0.15.0", + "webpack": "^1.12.0" + }, + "typings": "source-map" +} diff --git a/node_modules/source-map/source-map.js b/node_modules/source-map/source-map.js new file mode 100644 index 00000000..bc88fe82 --- /dev/null +++ b/node_modules/source-map/source-map.js @@ -0,0 +1,8 @@ +/* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ +exports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator; +exports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer; +exports.SourceNode = require('./lib/source-node').SourceNode; diff --git a/node_modules/split-string/LICENSE b/node_modules/split-string/LICENSE new file mode 100644 index 00000000..e33d14b7 --- /dev/null +++ b/node_modules/split-string/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/split-string/README.md b/node_modules/split-string/README.md new file mode 100644 index 00000000..d622e44d --- /dev/null +++ b/node_modules/split-string/README.md @@ -0,0 +1,321 @@ +# split-string [![NPM version](https://img.shields.io/npm/v/split-string.svg?style=flat)](https://www.npmjs.com/package/split-string) [![NPM monthly downloads](https://img.shields.io/npm/dm/split-string.svg?style=flat)](https://npmjs.org/package/split-string) [![NPM total downloads](https://img.shields.io/npm/dt/split-string.svg?style=flat)](https://npmjs.org/package/split-string) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/split-string.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/split-string) + +> Split a string on a character except when the character is escaped. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save split-string +``` + + + +
+### Why use this? +
+ +Although it's easy to split on a string: + +```js +console.log('a.b.c'.split('.')); +//=> ['a', 'b', 'c'] +``` + +It's more challenging to split a string whilst respecting escaped or quoted characters. + +**Bad** + +```js +console.log('a\\.b.c'.split('.')); +//=> ['a\\', 'b', 'c'] + +console.log('"a.b.c".d'.split('.')); +//=> ['"a', 'b', 'c"', 'd'] +``` + +**Good** + +```js +var split = require('split-string'); +console.log(split('a\\.b.c')); +//=> ['a.b', 'c'] + +console.log(split('"a.b.c".d')); +//=> ['a.b.c', 'd'] +``` + +See the [options](#options) to learn how to choose the separator or retain quotes or escaping. + +
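+
+As a quick sketch of how those options combine (only `sep` and `keepQuotes` from the [options](#options) below are used here), something like this should work:
+
+```js
+var split = require('split-string');
+
+// split on "/" instead of the default "." and keep the double quotes in the result
+console.log(split('a/"b/c"/d', { sep: '/', keepQuotes: true }));
+//=> ['a', '"b/c"', 'd']
+```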
+ +
+ +## Usage + +```js +var split = require('split-string'); + +split('a.b.c'); +//=> ['a', 'b', 'c'] + +// respects escaped characters +split('a.b.c\\.d'); +//=> ['a', 'b', 'c.d'] + +// respects double-quoted strings +split('a."b.c.d".e'); +//=> ['a', 'b.c.d', 'e'] +``` + +**Brackets** + +Also respects brackets [unless disabled](#optionsbrackets): + +```js +split('a (b c d) e', ' '); +//=> ['a', '(b c d)', 'e'] +``` + +## Options + +### options.brackets + +**Type**: `object|boolean` + +**Default**: `undefined` + +**Description** + +If enabled, split-string will not split inside brackets. The following brackets types are supported when `options.brackets` is `true`, + +```js +{ + '<': '>', + '(': ')', + '[': ']', + '{': '}' +} +``` + +Or, if object of brackets must be passed, each property on the object must be a bracket type, where the property key is the opening delimiter and property value is the closing delimiter. + +**Examples** + +```js +// no bracket support by default +split('a.{b.c}'); +//=> [ 'a', '{b', 'c}' ] + +// support all basic bracket types: "<>{}[]()" +split('a.{b.c}', {brackets: true}); +//=> [ 'a', '{b.c}' ] + +// also supports nested brackets +split('a.{b.{c.d}.e}.f', {brackets: true}); +//=> [ 'a', '{b.{c.d}.e}', 'f' ] + +// support only the specified brackets +split('[a.b].(c.d)', {brackets: {'[': ']'}}); +//=> [ '[a.b]', '(c', 'd)' ] +``` + +### options.sep + +**Type**: `string` + +**Default**: `.` + +The separator/character to split on. + +**Example** + +```js +split('a.b,c', {sep: ','}); +//=> ['a.b', 'c'] + +// you can also pass the separator as string as the last argument +split('a.b,c', ','); +//=> ['a.b', 'c'] +``` + +### options.keepEscaping + +**Type**: `boolean` + +**Default**: `undefined` + +Keep backslashes in the result. + +**Example** + +```js +split('a.b\\.c'); +//=> ['a', 'b.c'] + +split('a.b.\\c', {keepEscaping: true}); +//=> ['a', 'b\.c'] +``` + +### options.keepQuotes + +**Type**: `boolean` + +**Default**: `undefined` + +Keep single- or double-quotes in the result. + +**Example** + +```js +split('a."b.c.d".e'); +//=> ['a', 'b.c.d', 'e'] + +split('a."b.c.d".e', {keepQuotes: true}); +//=> ['a', '"b.c.d"', 'e'] + +split('a.\'b.c.d\'.e', {keepQuotes: true}); +//=> ['a', '\'b.c.d\'', 'e'] +``` + +### options.keepDoubleQuotes + +**Type**: `boolean` + +**Default**: `undefined` + +Keep double-quotes in the result. + +**Example** + +```js +split('a."b.c.d".e'); +//=> ['a', 'b.c.d', 'e'] + +split('a."b.c.d".e', {keepDoubleQuotes: true}); +//=> ['a', '"b.c.d"', 'e'] +``` + +### options.keepSingleQuotes + +**Type**: `boolean` + +**Default**: `undefined` + +Keep single-quotes in the result. + +**Example** + +```js +split('a.\'b.c.d\'.e'); +//=> ['a', 'b.c.d', 'e'] + +split('a.\'b.c.d\'.e', {keepSingleQuotes: true}); +//=> ['a', '\'b.c.d\'', 'e'] +``` + +## Customizer + +**Type**: `function` + +**Default**: `undefined` + +Pass a function as the last argument to customize how tokens are added to the array. + +**Example** + +```js +var arr = split('a.b', function(tok) { + if (tok.arr[tok.arr.length - 1] === 'a') { + tok.split = false; + } +}); +console.log(arr); +//=> ['a.b'] +``` + +**Properties** + +The `tok` object has the following properties: + +* `tok.val` (string) The current value about to be pushed onto the result array +* `tok.idx` (number) the current index in the string +* `tok.str` (string) the entire string +* `tok.arr` (array) the result array + +## Release history + +### v3.0.0 - 2017-06-17 + +**Added** + +* adds support for brackets + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [deromanize](https://www.npmjs.com/package/deromanize): Convert roman numerals to arabic numbers (useful for books, outlines, documentation, slide decks, etc) | [homepage](https://github.com/jonschlinkert/deromanize "Convert roman numerals to arabic numbers (useful for books, outlines, documentation, slide decks, etc)") +* [randomatic](https://www.npmjs.com/package/randomatic): Generate randomized strings of a specified length using simple character sequences. The original generate-password. | [homepage](https://github.com/jonschlinkert/randomatic "Generate randomized strings of a specified length using simple character sequences. The original generate-password.") +* [repeat-string](https://www.npmjs.com/package/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. | [homepage](https://github.com/jonschlinkert/repeat-string "Repeat the given string n times. Fastest implementation for repeating a string.") +* [romanize](https://www.npmjs.com/package/romanize): Convert numbers to roman numerals (useful for books, outlines, documentation, slide decks, etc) | [homepage](https://github.com/jonschlinkert/romanize "Convert numbers to roman numerals (useful for books, outlines, documentation, slide decks, etc)") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 28 | [jonschlinkert](https://github.com/jonschlinkert) | +| 9 | [doowb](https://github.com/doowb) | + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on November 19, 2017._ \ No newline at end of file diff --git a/node_modules/split-string/index.js b/node_modules/split-string/index.js new file mode 100644 index 00000000..7bc0ea91 --- /dev/null +++ b/node_modules/split-string/index.js @@ -0,0 +1,171 @@ +/*! + * split-string + * + * Copyright (c) 2015-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +var extend = require('extend-shallow'); + +module.exports = function(str, options, fn) { + if (typeof str !== 'string') { + throw new TypeError('expected a string'); + } + + if (typeof options === 'function') { + fn = options; + options = null; + } + + // allow separator to be defined as a string + if (typeof options === 'string') { + options = { sep: options }; + } + + var opts = extend({sep: '.'}, options); + var quotes = opts.quotes || ['"', "'", '`']; + var brackets; + + if (opts.brackets === true) { + brackets = { + '<': '>', + '(': ')', + '[': ']', + '{': '}' + }; + } else if (opts.brackets) { + brackets = opts.brackets; + } + + var tokens = []; + var stack = []; + var arr = ['']; + var sep = opts.sep; + var len = str.length; + var idx = -1; + var closeIdx; + + function expected() { + if (brackets && stack.length) { + return brackets[stack[stack.length - 1]]; + } + } + + while (++idx < len) { + var ch = str[idx]; + var next = str[idx + 1]; + var tok = { val: ch, idx: idx, arr: arr, str: str }; + tokens.push(tok); + + if (ch === '\\') { + tok.val = keepEscaping(opts, str, idx) === true ? 
(ch + next) : next; + tok.escaped = true; + if (typeof fn === 'function') { + fn(tok); + } + arr[arr.length - 1] += tok.val; + idx++; + continue; + } + + if (brackets && brackets[ch]) { + stack.push(ch); + var e = expected(); + var i = idx + 1; + + if (str.indexOf(e, i + 1) !== -1) { + while (stack.length && i < len) { + var s = str[++i]; + if (s === '\\') { + s++; + continue; + } + + if (quotes.indexOf(s) !== -1) { + i = getClosingQuote(str, s, i + 1); + continue; + } + + e = expected(); + if (stack.length && str.indexOf(e, i + 1) === -1) { + break; + } + + if (brackets[s]) { + stack.push(s); + continue; + } + + if (e === s) { + stack.pop(); + } + } + } + + closeIdx = i; + if (closeIdx === -1) { + arr[arr.length - 1] += ch; + continue; + } + + ch = str.slice(idx, closeIdx + 1); + tok.val = ch; + tok.idx = idx = closeIdx; + } + + if (quotes.indexOf(ch) !== -1) { + closeIdx = getClosingQuote(str, ch, idx + 1); + if (closeIdx === -1) { + arr[arr.length - 1] += ch; + continue; + } + + if (keepQuotes(ch, opts) === true) { + ch = str.slice(idx, closeIdx + 1); + } else { + ch = str.slice(idx + 1, closeIdx); + } + + tok.val = ch; + tok.idx = idx = closeIdx; + } + + if (typeof fn === 'function') { + fn(tok, tokens); + ch = tok.val; + idx = tok.idx; + } + + if (tok.val === sep && tok.split !== false) { + arr.push(''); + continue; + } + + arr[arr.length - 1] += tok.val; + } + + return arr; +}; + +function getClosingQuote(str, ch, i, brackets) { + var idx = str.indexOf(ch, i); + if (str.charAt(idx - 1) === '\\') { + return getClosingQuote(str, ch, idx + 1); + } + return idx; +} + +function keepQuotes(ch, opts) { + if (opts.keepDoubleQuotes === true && ch === '"') return true; + if (opts.keepSingleQuotes === true && ch === "'") return true; + return opts.keepQuotes; +} + +function keepEscaping(opts, str, idx) { + if (typeof opts.keepEscaping === 'function') { + return opts.keepEscaping(str, idx); + } + return opts.keepEscaping === true || str[idx + 1] === '\\'; +} diff --git a/node_modules/split-string/node_modules/extend-shallow/LICENSE b/node_modules/split-string/node_modules/extend-shallow/LICENSE new file mode 100644 index 00000000..99c93691 --- /dev/null +++ b/node_modules/split-string/node_modules/extend-shallow/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, 2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/split-string/node_modules/extend-shallow/README.md b/node_modules/split-string/node_modules/extend-shallow/README.md new file mode 100644 index 00000000..dee226f4 --- /dev/null +++ b/node_modules/split-string/node_modules/extend-shallow/README.md @@ -0,0 +1,97 @@ +# extend-shallow [![NPM version](https://img.shields.io/npm/v/extend-shallow.svg?style=flat)](https://www.npmjs.com/package/extend-shallow) [![NPM monthly downloads](https://img.shields.io/npm/dm/extend-shallow.svg?style=flat)](https://npmjs.org/package/extend-shallow) [![NPM total downloads](https://img.shields.io/npm/dt/extend-shallow.svg?style=flat)](https://npmjs.org/package/extend-shallow) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/extend-shallow.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/extend-shallow) + +> Extend an object with the properties of additional objects. node.js/javascript util. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save extend-shallow +``` + +## Usage + +```js +var extend = require('extend-shallow'); + +extend({a: 'b'}, {c: 'd'}) +//=> {a: 'b', c: 'd'} +``` + +Pass an empty object to shallow clone: + +```js +var obj = {}; +extend(obj, {a: 'b'}, {c: 'd'}) +//=> {a: 'b', c: 'd'} +``` + +## About + +
+
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+ +
+
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+ +
+
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
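+
+As a small added illustration (not part of the original readme): because the bundled `index.js` shown later in this patch uses `Object.assign` when available and `assign-symbols` otherwise, own enumerable `Symbol` keys are copied along with string keys:
+
+```js
+var extend = require('extend-shallow');
+var tag = Symbol('tag');
+
+var target = extend({}, { a: 1, [tag]: 'x' });
+console.log(target.a);    //=> 1
+console.log(target[tag]); //=> 'x'
+```
+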
+ +### Related projects + +You might also be interested in these projects: + +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow "Extend an object with the properties of additional objects. node.js/javascript util.") +* [for-in](https://www.npmjs.com/package/for-in): Iterate over the own and inherited enumerable properties of an object, and return an object… [more](https://github.com/jonschlinkert/for-in) | [homepage](https://github.com/jonschlinkert/for-in "Iterate over the own and inherited enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js") +* [for-own](https://www.npmjs.com/package/for-own): Iterate over the own enumerable properties of an object, and return an object with properties… [more](https://github.com/jonschlinkert/for-own) | [homepage](https://github.com/jonschlinkert/for-own "Iterate over the own enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 33 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [pdehaan](https://github.com/pdehaan) | + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). 
+ +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on November 19, 2017._ \ No newline at end of file diff --git a/node_modules/split-string/node_modules/extend-shallow/index.js b/node_modules/split-string/node_modules/extend-shallow/index.js new file mode 100644 index 00000000..c9582f8f --- /dev/null +++ b/node_modules/split-string/node_modules/extend-shallow/index.js @@ -0,0 +1,60 @@ +'use strict'; + +var isExtendable = require('is-extendable'); +var assignSymbols = require('assign-symbols'); + +module.exports = Object.assign || function(obj/*, objects*/) { + if (obj === null || typeof obj === 'undefined') { + throw new TypeError('Cannot convert undefined or null to object'); + } + if (!isObject(obj)) { + obj = {}; + } + for (var i = 1; i < arguments.length; i++) { + var val = arguments[i]; + if (isString(val)) { + val = toObject(val); + } + if (isObject(val)) { + assign(obj, val); + assignSymbols(obj, val); + } + } + return obj; +}; + +function assign(a, b) { + for (var key in b) { + if (hasOwn(b, key)) { + a[key] = b[key]; + } + } +} + +function isString(val) { + return (val && typeof val === 'string'); +} + +function toObject(str) { + var obj = {}; + for (var i in str) { + obj[i] = str[i]; + } + return obj; +} + +function isObject(val) { + return (val && typeof val === 'object') || isExtendable(val); +} + +/** + * Returns true if the given `key` is an own property of `obj`. + */ + +function hasOwn(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +function isEnum(obj, key) { + return Object.prototype.propertyIsEnumerable.call(obj, key); +} diff --git a/node_modules/split-string/node_modules/extend-shallow/package.json b/node_modules/split-string/node_modules/extend-shallow/package.json new file mode 100644 index 00000000..e5e91053 --- /dev/null +++ b/node_modules/split-string/node_modules/extend-shallow/package.json @@ -0,0 +1,83 @@ +{ + "name": "extend-shallow", + "description": "Extend an object with the properties of additional objects. 
node.js/javascript util.", + "version": "3.0.2", + "homepage": "https://github.com/jonschlinkert/extend-shallow", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Peter deHaan (http://about.me/peterdehaan)" + ], + "repository": "jonschlinkert/extend-shallow", + "bugs": { + "url": "https://github.com/jonschlinkert/extend-shallow/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "devDependencies": { + "array-slice": "^1.0.0", + "benchmarked": "^2.0.0", + "for-own": "^1.0.0", + "gulp-format-md": "^1.0.0", + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.1", + "minimist": "^1.2.0", + "mocha": "^3.5.3", + "object-assign": "^4.1.1" + }, + "keywords": [ + "assign", + "clone", + "extend", + "merge", + "obj", + "object", + "object-assign", + "object.assign", + "prop", + "properties", + "property", + "props", + "shallow", + "util", + "utility", + "utils", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "related": { + "list": [ + "extend-shallow", + "for-in", + "for-own", + "is-plain-object", + "isobject", + "kind-of" + ] + }, + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/split-string/node_modules/is-extendable/LICENSE b/node_modules/split-string/node_modules/is-extendable/LICENSE new file mode 100644 index 00000000..c0d7f136 --- /dev/null +++ b/node_modules/split-string/node_modules/is-extendable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file
diff --git a/node_modules/split-string/node_modules/is-extendable/README.md b/node_modules/split-string/node_modules/is-extendable/README.md
new file mode 100644
index 00000000..875b56a7
--- /dev/null
+++ b/node_modules/split-string/node_modules/is-extendable/README.md
@@ -0,0 +1,88 @@
+# is-extendable [![NPM version](https://img.shields.io/npm/v/is-extendable.svg?style=flat)](https://www.npmjs.com/package/is-extendable) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![NPM total downloads](https://img.shields.io/npm/dt/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-extendable.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-extendable)
+
+> Returns true if a value is a plain object, array or function.
+
+## Install
+
+Install with [npm](https://www.npmjs.com/):
+
+```sh
+$ npm install --save is-extendable
+```
+
+## Usage
+
+```js
+var isExtendable = require('is-extendable');
+```
+
+Returns true if the value is any of the following:
+
+* array
+* plain object
+* function
+
+## Notes
+
+All objects in JavaScript can have keys, but it's a pain to check for this, since we either need to verify that the value is not `null` or `undefined` and:
+
+* the value is not a primitive, or
+* that the object is a plain object, function or array
+
+Also note that an `extendable` object is not the same as an [extensible object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/isExtensible), which is one that (in es6) is not sealed, frozen, or marked as non-extensible using `preventExtensions`.
+
+## Release history
+
+### v1.0.0 - 2017/07/20
+
+**Breaking changes**
+
+* No longer considers date, regex or error objects to be extendable
+
+## About
+
+### Related projects
+
+* [assign-deep](https://www.npmjs.com/package/assign-deep): Deeply assign the enumerable properties and/or es6 Symbol properties of source objects to the target… [more](https://github.com/jonschlinkert/assign-deep) | [homepage](https://github.com/jonschlinkert/assign-deep "Deeply assign the enumerable properties and/or es6 Symbol properties of source objects to the target (first) object.")
+* [is-equal-shallow](https://www.npmjs.com/package/is-equal-shallow): Does a shallow comparison of two objects, returning false if the keys or values differ. | [homepage](https://github.com/jonschlinkert/is-equal-shallow "Does a shallow comparison of two objects, returning false if the keys or values differ.")
+* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.")
+* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.")
+* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.")
+
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
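+
+To make the rules above concrete, here is a short added sketch; the return values follow from the one-line implementation in `index.js` shown later in this patch:
+
+```js
+var isExtendable = require('is-extendable');
+
+isExtendable({});            //=> true  (plain object)
+isExtendable([]);            //=> true  (array)
+isExtendable(function() {}); //=> true  (function)
+isExtendable('foo');         //=> false (primitive)
+isExtendable(null);          //=> false
+isExtendable(new Date());    //=> false (dates are no longer extendable as of v1.0.0)
+```
+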
+ +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on July 20, 2017._ \ No newline at end of file diff --git a/node_modules/split-string/node_modules/is-extendable/index.d.ts b/node_modules/split-string/node_modules/is-extendable/index.d.ts new file mode 100644 index 00000000..b96d5075 --- /dev/null +++ b/node_modules/split-string/node_modules/is-extendable/index.d.ts @@ -0,0 +1,5 @@ +export = isExtendable; + +declare function isExtendable(val: any): boolean; + +declare namespace isExtendable {} diff --git a/node_modules/split-string/node_modules/is-extendable/index.js b/node_modules/split-string/node_modules/is-extendable/index.js new file mode 100644 index 00000000..a8b26ad0 --- /dev/null +++ b/node_modules/split-string/node_modules/is-extendable/index.js @@ -0,0 +1,14 @@ +/*! + * is-extendable + * + * Copyright (c) 2015-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +var isPlainObject = require('is-plain-object'); + +module.exports = function isExtendable(val) { + return isPlainObject(val) || typeof val === 'function' || Array.isArray(val); +}; diff --git a/node_modules/split-string/node_modules/is-extendable/package.json b/node_modules/split-string/node_modules/is-extendable/package.json new file mode 100644 index 00000000..2aaab65a --- /dev/null +++ b/node_modules/split-string/node_modules/is-extendable/package.json @@ -0,0 +1,67 @@ +{ + "name": "is-extendable", + "description": "Returns true if a value is a plain object, array or function.", + "version": "1.0.1", + "homepage": "https://github.com/jonschlinkert/is-extendable", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extendable", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extendable/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "index.d.ts" + ], + "main": "index.js", + "types": "index.d.ts", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.4.2" + }, + "keywords": [ + "array", + "assign", + "check", + "date", + "extend", + "extendable", + "extensible", + "function", + "is", + "object", + "regex", + "test" + ], + "verb": { + "related": { + "list": [ + "assign-deep", + "is-equal-shallow", + "is-plain-object", + "isobject", + "kind-of" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/split-string/package.json b/node_modules/split-string/package.json new file mode 100644 index 00000000..8f490a24 --- /dev/null +++ b/node_modules/split-string/package.json @@ -0,0 +1,65 @@ +{ + "name": "split-string", + "description": "Split a string on a character except when the character is escaped.", + "version": "3.1.0", + "homepage": "https://github.com/jonschlinkert/split-string", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "jonschlinkert/split-string", + "bugs": { + "url": "https://github.com/jonschlinkert/split-string/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "extend-shallow": "^3.0.0" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "character", + "escape", + "split", + "string" + ], + "verb": { + "toc": false, + "layout": "default", + "titles": [ + ".", + "install", + "Why use this?" + ], + "related": { + "list": [ + "deromanize", + "randomatic", + "repeat-string", + "romanize" + ] + }, + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/static-extend/LICENSE b/node_modules/static-extend/LICENSE new file mode 100644 index 00000000..e28e6032 --- /dev/null +++ b/node_modules/static-extend/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/static-extend/index.js b/node_modules/static-extend/index.js new file mode 100644 index 00000000..f4124b2a --- /dev/null +++ b/node_modules/static-extend/index.js @@ -0,0 +1,90 @@ +/*! + * static-extend + * + * Copyright (c) 2016, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var copy = require('object-copy'); +var define = require('define-property'); +var util = require('util'); + +/** + * Returns a function for extending the static properties, + * prototype properties, and descriptors from the `Parent` + * constructor onto `Child` constructors. + * + * ```js + * var extend = require('static-extend'); + * Parent.extend = extend(Parent); + * + * // optionally pass a custom merge function as the second arg + * Parent.extend = extend(Parent, function(Child) { + * Child.prototype.mixin = function(key, val) { + * Child.prototype[key] = val; + * }; + * }); + * + * // extend "child" constructors + * Parent.extend(Child); + * + * // optionally define prototype methods as the second arg + * Parent.extend(Child, { + * foo: function() {}, + * bar: function() {} + * }); + * ``` + * @param {Function} `Parent` Parent ctor + * @param {Function} `extendFn` Optional extend function for handling any necessary custom merging. Useful when updating methods that require a specific prototype. + * @param {Function} `Child` Child ctor + * @param {Object} `proto` Optionally pass additional prototype properties to inherit. 
+ * @return {Object} + * @api public + */ + +function extend(Parent, extendFn) { + if (typeof Parent !== 'function') { + throw new TypeError('expected Parent to be a function.'); + } + + return function(Ctor, proto) { + if (typeof Ctor !== 'function') { + throw new TypeError('expected Ctor to be a function.'); + } + + util.inherits(Ctor, Parent); + copy(Ctor, Parent); + + // proto can be null or a plain object + if (typeof proto === 'object') { + var obj = Object.create(proto); + + for (var k in obj) { + Ctor.prototype[k] = obj[k]; + } + } + + // keep a reference to the parent prototype + define(Ctor.prototype, '_parent_', { + configurable: true, + set: function() {}, + get: function() { + return Parent.prototype; + } + }); + + if (typeof extendFn === 'function') { + extendFn(Ctor, Parent); + } + + Ctor.extend = extend(Ctor, extendFn); + }; +}; + +/** + * Expose `extend` + */ + +module.exports = extend; diff --git a/node_modules/static-extend/package.json b/node_modules/static-extend/package.json new file mode 100644 index 00000000..a63ab950 --- /dev/null +++ b/node_modules/static-extend/package.json @@ -0,0 +1,63 @@ +{ + "name": "static-extend", + "description": "Adds a static `extend` method to a class, to simplify inheritance. Extends the static properties, prototype properties, and descriptors from a `Parent` constructor onto `Child` constructors.", + "version": "0.1.2", + "homepage": "https://github.com/jonschlinkert/static-extend", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/static-extend", + "bugs": { + "url": "https://github.com/jonschlinkert/static-extend/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "define-property": "^0.2.5", + "object-copy": "^0.1.0" + }, + "devDependencies": { + "gulp-format-md": "^0.1.9", + "mocha": "^2.5.3" + }, + "keywords": [ + "class", + "ctor", + "descriptor", + "extend", + "extends", + "inherit", + "inheritance", + "merge", + "method", + "prop", + "properties", + "property", + "prototype" + ], + "verb": { + "run": true, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "reflinks": [ + "verb", + "verb-readme-generator" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/statuses/HISTORY.md b/node_modules/statuses/HISTORY.md new file mode 100644 index 00000000..a1977b29 --- /dev/null +++ b/node_modules/statuses/HISTORY.md @@ -0,0 +1,65 @@ +1.5.0 / 2018-03-27 +================== + + * Add `103 Early Hints` + +1.4.0 / 2017-10-20 +================== + + * Add `STATUS_CODES` export + +1.3.1 / 2016-11-11 +================== + + * Fix return type in JSDoc + +1.3.0 / 2016-05-17 +================== + + * Add `421 Misdirected Request` + * perf: enable strict mode + +1.2.1 / 2015-02-01 +================== + + * Fix message for status 451 + - `451 Unavailable For Legal Reasons` + +1.2.0 / 2014-09-28 +================== + + * Add `208 Already Repored` + * Add `226 IM Used` + * Add `306 (Unused)` + * Add `415 Unable For Legal Reasons` + * Add `508 Loop Detected` + +1.1.1 / 2014-09-24 +================== + + * Add missing 308 to `codes.json` + +1.1.0 / 2014-09-21 +================== + + * Add `codes.json` for universal support + +1.0.4 / 2014-08-20 +================== + + * Package cleanup + +1.0.3 / 2014-06-08 +================== + + * Add 308 to `.redirect` category + +1.0.2 / 2014-03-13 
+================== + + * Add `.retry` category + +1.0.1 / 2014-03-12 +================== + + * Initial release diff --git a/node_modules/statuses/LICENSE b/node_modules/statuses/LICENSE new file mode 100644 index 00000000..28a31618 --- /dev/null +++ b/node_modules/statuses/LICENSE @@ -0,0 +1,23 @@ + +The MIT License (MIT) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2016 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/statuses/README.md b/node_modules/statuses/README.md new file mode 100644 index 00000000..0fe5720d --- /dev/null +++ b/node_modules/statuses/README.md @@ -0,0 +1,127 @@ +# Statuses + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +HTTP status utility for node. + +This module provides a list of status codes and messages sourced from +a few different projects: + + * The [IANA Status Code Registry](https://www.iana.org/assignments/http-status-codes/http-status-codes.xhtml) + * The [Node.js project](https://nodejs.org/) + * The [NGINX project](https://www.nginx.com/) + * The [Apache HTTP Server project](https://httpd.apache.org/) + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install statuses +``` + +## API + + + +```js +var status = require('statuses') +``` + +### var code = status(Integer || String) + +If `Integer` or `String` is a valid HTTP code or status message, then the +appropriate `code` will be returned. Otherwise, an error will be thrown. + + + +```js +status(403) // => 403 +status('403') // => 403 +status('forbidden') // => 403 +status('Forbidden') // => 403 +status(306) // throws, as it's not supported by node.js +``` + +### status.STATUS_CODES + +Returns an object which maps status codes to status messages, in +the same format as the +[Node.js http module](https://nodejs.org/dist/latest/docs/api/http.html#http_http_status_codes). + +### status.codes + +Returns an array of all the status codes as `Integer`s. + +### var msg = status[code] + +Map of `code` to `status message`. `undefined` for invalid `code`s. 
+ + + +```js +status[404] // => 'Not Found' +``` + +### var code = status[msg] + +Map of `status message` to `code`. `msg` can either be title-cased or +lower-cased. `undefined` for invalid `status message`s. + + + +```js +status['not found'] // => 404 +status['Not Found'] // => 404 +``` + +### status.redirect[code] + +Returns `true` if a status code is a valid redirect status. + + + +```js +status.redirect[200] // => undefined +status.redirect[301] // => true +``` + +### status.empty[code] + +Returns `true` if a status code expects an empty body. + + + +```js +status.empty[200] // => undefined +status.empty[204] // => true +status.empty[304] // => true +``` + +### status.retry[code] + +Returns `true` if you should retry the rest. + + + +```js +status.retry[501] // => undefined +status.retry[503] // => true +``` + +[npm-image]: https://img.shields.io/npm/v/statuses.svg +[npm-url]: https://npmjs.org/package/statuses +[node-version-image]: https://img.shields.io/node/v/statuses.svg +[node-version-url]: https://nodejs.org/en/download +[travis-image]: https://img.shields.io/travis/jshttp/statuses.svg +[travis-url]: https://travis-ci.org/jshttp/statuses +[coveralls-image]: https://img.shields.io/coveralls/jshttp/statuses.svg +[coveralls-url]: https://coveralls.io/r/jshttp/statuses?branch=master +[downloads-image]: https://img.shields.io/npm/dm/statuses.svg +[downloads-url]: https://npmjs.org/package/statuses diff --git a/node_modules/statuses/codes.json b/node_modules/statuses/codes.json new file mode 100644 index 00000000..a09283a2 --- /dev/null +++ b/node_modules/statuses/codes.json @@ -0,0 +1,66 @@ +{ + "100": "Continue", + "101": "Switching Protocols", + "102": "Processing", + "103": "Early Hints", + "200": "OK", + "201": "Created", + "202": "Accepted", + "203": "Non-Authoritative Information", + "204": "No Content", + "205": "Reset Content", + "206": "Partial Content", + "207": "Multi-Status", + "208": "Already Reported", + "226": "IM Used", + "300": "Multiple Choices", + "301": "Moved Permanently", + "302": "Found", + "303": "See Other", + "304": "Not Modified", + "305": "Use Proxy", + "306": "(Unused)", + "307": "Temporary Redirect", + "308": "Permanent Redirect", + "400": "Bad Request", + "401": "Unauthorized", + "402": "Payment Required", + "403": "Forbidden", + "404": "Not Found", + "405": "Method Not Allowed", + "406": "Not Acceptable", + "407": "Proxy Authentication Required", + "408": "Request Timeout", + "409": "Conflict", + "410": "Gone", + "411": "Length Required", + "412": "Precondition Failed", + "413": "Payload Too Large", + "414": "URI Too Long", + "415": "Unsupported Media Type", + "416": "Range Not Satisfiable", + "417": "Expectation Failed", + "418": "I'm a teapot", + "421": "Misdirected Request", + "422": "Unprocessable Entity", + "423": "Locked", + "424": "Failed Dependency", + "425": "Unordered Collection", + "426": "Upgrade Required", + "428": "Precondition Required", + "429": "Too Many Requests", + "431": "Request Header Fields Too Large", + "451": "Unavailable For Legal Reasons", + "500": "Internal Server Error", + "501": "Not Implemented", + "502": "Bad Gateway", + "503": "Service Unavailable", + "504": "Gateway Timeout", + "505": "HTTP Version Not Supported", + "506": "Variant Also Negotiates", + "507": "Insufficient Storage", + "508": "Loop Detected", + "509": "Bandwidth Limit Exceeded", + "510": "Not Extended", + "511": "Network Authentication Required" +} diff --git a/node_modules/statuses/index.js b/node_modules/statuses/index.js new file mode 100644 index 
00000000..4df469a0 --- /dev/null +++ b/node_modules/statuses/index.js @@ -0,0 +1,113 @@ +/*! + * statuses + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var codes = require('./codes.json') + +/** + * Module exports. + * @public + */ + +module.exports = status + +// status code to message map +status.STATUS_CODES = codes + +// array of status codes +status.codes = populateStatusesMap(status, codes) + +// status codes for redirects +status.redirect = { + 300: true, + 301: true, + 302: true, + 303: true, + 305: true, + 307: true, + 308: true +} + +// status codes for empty bodies +status.empty = { + 204: true, + 205: true, + 304: true +} + +// status codes for when you should retry the request +status.retry = { + 502: true, + 503: true, + 504: true +} + +/** + * Populate the statuses map for given codes. + * @private + */ + +function populateStatusesMap (statuses, codes) { + var arr = [] + + Object.keys(codes).forEach(function forEachCode (code) { + var message = codes[code] + var status = Number(code) + + // Populate properties + statuses[status] = message + statuses[message] = status + statuses[message.toLowerCase()] = status + + // Add to array + arr.push(status) + }) + + return arr +} + +/** + * Get the status code. + * + * Given a number, this will throw if it is not a known status + * code, otherwise the code will be returned. Given a string, + * the string will be parsed for a number and return the code + * if valid, otherwise will lookup the code assuming this is + * the status message. + * + * @param {string|number} code + * @returns {number} + * @public + */ + +function status (code) { + if (typeof code === 'number') { + if (!status[code]) throw new Error('invalid status code: ' + code) + return code + } + + if (typeof code !== 'string') { + throw new TypeError('code must be a number or string') + } + + // '403' + var n = parseInt(code, 10) + if (!isNaN(n)) { + if (!status[n]) throw new Error('invalid status code: ' + n) + return n + } + + n = status[code.toLowerCase()] + if (!n) throw new Error('invalid status message: "' + code + '"') + return n +} diff --git a/node_modules/statuses/package.json b/node_modules/statuses/package.json new file mode 100644 index 00000000..7595e2ba --- /dev/null +++ b/node_modules/statuses/package.json @@ -0,0 +1,48 @@ +{ + "name": "statuses", + "description": "HTTP status utility", + "version": "1.5.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "repository": "jshttp/statuses", + "license": "MIT", + "keywords": [ + "http", + "status", + "code" + ], + "files": [ + "HISTORY.md", + "index.js", + "codes.json", + "LICENSE" + ], + "devDependencies": { + "csv-parse": "1.2.4", + "eslint": "4.19.1", + "eslint-config-standard": "11.0.0", + "eslint-plugin-import": "2.9.0", + "eslint-plugin-markdown": "1.0.0-beta.6", + "eslint-plugin-node": "6.0.1", + "eslint-plugin-promise": "3.7.0", + "eslint-plugin-standard": "3.0.1", + "istanbul": "0.4.5", + "mocha": "1.21.5", + "raw-body": "2.3.2", + "stream-to-array": "2.3.0" + }, + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "build": "node scripts/build.js", + "fetch": "node scripts/fetch-apache.js && node scripts/fetch-iana.js && node scripts/fetch-nginx.js && node scripts/fetch-node.js", + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-ci": "istanbul cover 
node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "update": "npm run fetch && npm run build" + } +} diff --git a/node_modules/string-width/index.js b/node_modules/string-width/index.js new file mode 100644 index 00000000..bbc49d29 --- /dev/null +++ b/node_modules/string-width/index.js @@ -0,0 +1,36 @@ +'use strict'; +const stripAnsi = require('strip-ansi'); +const isFullwidthCodePoint = require('is-fullwidth-code-point'); + +module.exports = str => { + if (typeof str !== 'string' || str.length === 0) { + return 0; + } + + str = stripAnsi(str); + + let width = 0; + + for (let i = 0; i < str.length; i++) { + const code = str.codePointAt(i); + + // Ignore control characters + if (code <= 0x1F || (code >= 0x7F && code <= 0x9F)) { + continue; + } + + // Ignore combining characters + if (code >= 0x300 && code <= 0x36F) { + continue; + } + + // Surrogates + if (code > 0xFFFF) { + i++; + } + + width += isFullwidthCodePoint(code) ? 2 : 1; + } + + return width; +}; diff --git a/node_modules/string-width/license b/node_modules/string-width/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/string-width/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
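+
+A brief added sketch of the width rules implemented in `string-width/index.js` above (strip ANSI codes, skip control and combining characters, count fullwidth code points as two columns); the values are inferred from that implementation:
+
+```js
+const stringWidth = require('string-width');
+
+stringWidth('e\u0301');                //=> 1 (combining accent adds no width)
+stringWidth('\u001B[31mhi\u001B[39m'); //=> 2 (ANSI color codes are stripped first)
+stringWidth('古');                     //=> 2 (fullwidth characters take two columns)
+```
+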
diff --git a/node_modules/string-width/node_modules/ansi-regex/index.js b/node_modules/string-width/node_modules/ansi-regex/index.js new file mode 100644 index 00000000..c4aaecf5 --- /dev/null +++ b/node_modules/string-width/node_modules/ansi-regex/index.js @@ -0,0 +1,10 @@ +'use strict'; + +module.exports = () => { + const pattern = [ + '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[a-zA-Z\\d]*)*)?\\u0007)', + '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PRZcf-ntqry=><~]))' + ].join('|'); + + return new RegExp(pattern, 'g'); +}; diff --git a/node_modules/string-width/node_modules/ansi-regex/license b/node_modules/string-width/node_modules/ansi-regex/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/string-width/node_modules/ansi-regex/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/string-width/node_modules/ansi-regex/package.json b/node_modules/string-width/node_modules/ansi-regex/package.json new file mode 100644 index 00000000..e94852fd --- /dev/null +++ b/node_modules/string-width/node_modules/ansi-regex/package.json @@ -0,0 +1,53 @@ +{ + "name": "ansi-regex", + "version": "3.0.0", + "description": "Regular expression for matching ANSI escape codes", + "license": "MIT", + "repository": "chalk/ansi-regex", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava", + "view-supported": "node fixtures/view-codes.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "text", + "regex", + "regexp", + "re", + "match", + "test", + "find", + "pattern" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/string-width/node_modules/ansi-regex/readme.md b/node_modules/string-width/node_modules/ansi-regex/readme.md new file mode 100644 index 00000000..22db1c34 --- /dev/null +++ b/node_modules/string-width/node_modules/ansi-regex/readme.md @@ -0,0 +1,46 @@ +# ansi-regex [![Build Status](https://travis-ci.org/chalk/ansi-regex.svg?branch=master)](https://travis-ci.org/chalk/ansi-regex) + +> Regular expression for matching [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) + + +## Install + +``` +$ npm install ansi-regex +``` + + +## Usage + +```js +const ansiRegex = require('ansi-regex'); + +ansiRegex().test('\u001B[4mcake\u001B[0m'); +//=> true + +ansiRegex().test('cake'); +//=> false + +'\u001B[4mcake\u001B[0m'.match(ansiRegex()); +//=> ['\u001B[4m', '\u001B[0m'] +``` + + +## FAQ + +### Why do you test for codes not in the ECMA 48 standard? + +Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. We test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them. + +On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out. 
+ + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/node_modules/string-width/node_modules/strip-ansi/index.js b/node_modules/string-width/node_modules/strip-ansi/index.js new file mode 100644 index 00000000..96e0292c --- /dev/null +++ b/node_modules/string-width/node_modules/strip-ansi/index.js @@ -0,0 +1,4 @@ +'use strict'; +const ansiRegex = require('ansi-regex'); + +module.exports = input => typeof input === 'string' ? input.replace(ansiRegex(), '') : input; diff --git a/node_modules/string-width/node_modules/strip-ansi/license b/node_modules/string-width/node_modules/strip-ansi/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/string-width/node_modules/strip-ansi/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/string-width/node_modules/strip-ansi/package.json b/node_modules/string-width/node_modules/strip-ansi/package.json new file mode 100644 index 00000000..555f1946 --- /dev/null +++ b/node_modules/string-width/node_modules/strip-ansi/package.json @@ -0,0 +1,52 @@ +{ + "name": "strip-ansi", + "version": "4.0.0", + "description": "Strip ANSI escape codes", + "license": "MIT", + "repository": "chalk/strip-ansi", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "strip", + "trim", + "remove", + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-regex": "^3.0.0" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/string-width/node_modules/strip-ansi/readme.md b/node_modules/string-width/node_modules/strip-ansi/readme.md new file mode 100644 index 00000000..dc76f0cb --- /dev/null +++ b/node_modules/string-width/node_modules/strip-ansi/readme.md @@ -0,0 +1,39 @@ +# strip-ansi [![Build Status](https://travis-ci.org/chalk/strip-ansi.svg?branch=master)](https://travis-ci.org/chalk/strip-ansi) + +> Strip [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) + + +## Install + +``` +$ npm install strip-ansi +``` + + +## Usage + +```js +const stripAnsi = require('strip-ansi'); + +stripAnsi('\u001B[4mUnicorn\u001B[0m'); +//=> 'Unicorn' +``` + + +## Related + +- [strip-ansi-cli](https://github.com/chalk/strip-ansi-cli) - CLI for this module +- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes +- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/node_modules/string-width/package.json b/node_modules/string-width/package.json new file mode 100644 index 00000000..89f0b6a6 --- /dev/null +++ b/node_modules/string-width/package.json @@ -0,0 +1,55 @@ +{ + "name": "string-width", + "version": "2.1.1", + "description": "Get the visual width of a string - the number of columns required to display it", + "license": "MIT", + "repository": "sindresorhus/string-width", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "string", + "str", + "character", + "char", + "unicode", + "width", + "visual", + "column", + "columns", + "fullwidth", + "full-width", + "full", + "ansi", + "escape", + "codes", + "cli", + "command-line", + "terminal", + "console", + "cjk", + "chinese", + "japanese", + "korean", + "fixed-width" + ], + "dependencies": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/string-width/readme.md b/node_modules/string-width/readme.md new file mode 100644 index 00000000..df5b7199 --- /dev/null +++ b/node_modules/string-width/readme.md @@ -0,0 +1,42 @@ +# 
string-width [![Build Status](https://travis-ci.org/sindresorhus/string-width.svg?branch=master)](https://travis-ci.org/sindresorhus/string-width) + +> Get the visual width of a string - the number of columns required to display it + +Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and doesn't affect the width. + +Useful to be able to measure the actual width of command-line output. + + +## Install + +``` +$ npm install string-width +``` + + +## Usage + +```js +const stringWidth = require('string-width'); + +stringWidth('古'); +//=> 2 + +stringWidth('\u001b[1m古\u001b[22m'); +//=> 2 + +stringWidth('a'); +//=> 1 +``` + + +## Related + +- [string-width-cli](https://github.com/sindresorhus/string-width-cli) - CLI for this module +- [string-length](https://github.com/sindresorhus/string-length) - Get the real length of a string +- [widest-line](https://github.com/sindresorhus/widest-line) - Get the visual width of the widest line in a string + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/string_decoder/.travis.yml b/node_modules/string_decoder/.travis.yml new file mode 100644 index 00000000..3347a725 --- /dev/null +++ b/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/node_modules/string_decoder/LICENSE b/node_modules/string_decoder/LICENSE new file mode 100644 index 00000000..778edb20 --- /dev/null +++ b/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/node_modules/string_decoder/README.md b/node_modules/string_decoder/README.md new file mode 100644 index 00000000..5fd58315 --- /dev/null +++ b/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/string_decoder/lib/string_decoder.js b/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 00000000..2e89e63f --- /dev/null +++ b/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. 
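+//
+// For example (an illustrative sketch, not part of the original source):
+// feeding the UTF-8 bytes of '€' (0xE2 0x82 0xAC) across two writes only
+// yields the character once its final byte arrives:
+//
+//   var StringDecoder = require('string_decoder').StringDecoder;
+//   var decoder = new StringDecoder('utf8');
+//   decoder.write(Buffer.from([0xE2, 0x82])); // => ''
+//   decoder.write(Buffer.from([0xAC]));       // => '€'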
+exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. 
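// Illustrative note (editorial, not from the upstream source): after write(Buffer.from([0xE2, 0x82]))
// buffers the first two bytes of a 3-byte character, a following write(Buffer.from([0x28])) fails the
// first continuation-byte check below, emits a single '\ufffd' for the abandoned sequence, and then
// decodes '(' normally.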
+function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/string_decoder/package.json b/node_modules/string_decoder/package.json new file mode 100644 index 00000000..518c3eb9 --- /dev/null +++ b/node_modules/string_decoder/package.json @@ -0,0 +1,31 @@ +{ + "name": "string_decoder", + "version": "1.1.1", + "description": "The string_decoder module from Node core", + "main": "lib/string_decoder.js", + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "scripts": { + "test": "tap test/parallel/*.js && node test/verify-dependencies", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT" +} diff --git a/node_modules/strip-ansi/index.js b/node_modules/strip-ansi/index.js new file mode 100644 index 00000000..099480fb --- /dev/null +++ b/node_modules/strip-ansi/index.js @@ -0,0 +1,6 @@ +'use strict'; +var ansiRegex = require('ansi-regex')(); + +module.exports = function (str) { + return typeof str === 'string' ? str.replace(ansiRegex, '') : str; +}; diff --git a/node_modules/strip-ansi/license b/node_modules/strip-ansi/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/strip-ansi/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/strip-ansi/package.json b/node_modules/strip-ansi/package.json new file mode 100644 index 00000000..301685ba --- /dev/null +++ b/node_modules/strip-ansi/package.json @@ -0,0 +1,57 @@ +{ + "name": "strip-ansi", + "version": "3.0.1", + "description": "Strip ANSI escape codes", + "license": "MIT", + "repository": "chalk/strip-ansi", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "maintainers": [ + "Sindre Sorhus (sindresorhus.com)", + "Joshua Boy Nicolai Appelman (jbna.nl)", + "JD Ballard (github.com/qix-)" + ], + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "strip", + "trim", + "remove", + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/strip-ansi/readme.md b/node_modules/strip-ansi/readme.md new file mode 100644 index 00000000..cb7d9ff7 --- /dev/null +++ b/node_modules/strip-ansi/readme.md @@ -0,0 +1,33 @@ +# strip-ansi [![Build Status](https://travis-ci.org/chalk/strip-ansi.svg?branch=master)](https://travis-ci.org/chalk/strip-ansi) + +> Strip [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) + + +## Install + +``` +$ npm install --save strip-ansi +``` + + +## Usage + +```js +var stripAnsi = require('strip-ansi'); + +stripAnsi('\u001b[4mcake\u001b[0m'); +//=> 'cake' +``` + + +## Related + +- [strip-ansi-cli](https://github.com/chalk/strip-ansi-cli) - CLI for this module +- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes +- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/strip-eof/index.js b/node_modules/strip-eof/index.js new file mode 100644 index 00000000..a17d0afd --- /dev/null +++ b/node_modules/strip-eof/index.js @@ -0,0 +1,15 @@ +'use strict'; +module.exports = function (x) { + var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); + var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt(); + + if (x[x.length - 1] === lf) { + x = x.slice(0, x.length - 1); + } + + if (x[x.length - 1] === cr) { + x = x.slice(0, x.length - 1); + } + + return x; +}; diff --git a/node_modules/strip-eof/license b/node_modules/strip-eof/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/strip-eof/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/strip-eof/package.json b/node_modules/strip-eof/package.json new file mode 100644 index 00000000..36b88cdc --- /dev/null +++ b/node_modules/strip-eof/package.json @@ -0,0 +1,39 @@ +{ + "name": "strip-eof", + "version": "1.0.0", + "description": "Strip the End-Of-File (EOF) character from a string/buffer", + "license": "MIT", + "repository": "sindresorhus/strip-eof", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "strip", + "trim", + "remove", + "delete", + "eof", + "end", + "file", + "newline", + "linebreak", + "character", + "string", + "buffer" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/strip-eof/readme.md b/node_modules/strip-eof/readme.md new file mode 100644 index 00000000..45ffe043 --- /dev/null +++ b/node_modules/strip-eof/readme.md @@ -0,0 +1,28 @@ +# strip-eof [![Build Status](https://travis-ci.org/sindresorhus/strip-eof.svg?branch=master)](https://travis-ci.org/sindresorhus/strip-eof) + +> Strip the [End-Of-File](https://en.wikipedia.org/wiki/End-of-file) (EOF) character from a string/buffer + + +## Install + +``` +$ npm install --save strip-eof +``` + + +## Usage + +```js +const stripEof = require('strip-eof'); + +stripEof('foo\nbar\n\n'); +//=> 'foo\nbar\n' + +stripEof(new Buffer('foo\nbar\n\n')).toString(); +//=> 'foo\nbar\n' +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/strip-json-comments/index.js b/node_modules/strip-json-comments/index.js new file mode 100644 index 00000000..4e6576e6 --- /dev/null +++ b/node_modules/strip-json-comments/index.js @@ -0,0 +1,70 @@ +'use strict'; +var singleComment = 1; +var multiComment = 2; + +function stripWithoutWhitespace() { + return ''; +} + +function stripWithWhitespace(str, start, end) { + return str.slice(start, end).replace(/\S/g, ' '); +} + +module.exports = function (str, opts) { + opts = opts || {}; + + var currentChar; + var nextChar; + var insideString = false; + var insideComment = false; + var offset = 0; + var ret = ''; + var strip = opts.whitespace === false ? 
stripWithoutWhitespace : stripWithWhitespace; + + for (var i = 0; i < str.length; i++) { + currentChar = str[i]; + nextChar = str[i + 1]; + + if (!insideComment && currentChar === '"') { + var escaped = str[i - 1] === '\\' && str[i - 2] !== '\\'; + if (!escaped) { + insideString = !insideString; + } + } + + if (insideString) { + continue; + } + + if (!insideComment && currentChar + nextChar === '//') { + ret += str.slice(offset, i); + offset = i; + insideComment = singleComment; + i++; + } else if (insideComment === singleComment && currentChar + nextChar === '\r\n') { + i++; + insideComment = false; + ret += strip(str, offset, i); + offset = i; + continue; + } else if (insideComment === singleComment && currentChar === '\n') { + insideComment = false; + ret += strip(str, offset, i); + offset = i; + } else if (!insideComment && currentChar + nextChar === '/*') { + ret += str.slice(offset, i); + offset = i; + insideComment = multiComment; + i++; + continue; + } else if (insideComment === multiComment && currentChar + nextChar === '*/') { + i++; + insideComment = false; + ret += strip(str, offset, i + 1); + offset = i + 1; + continue; + } + } + + return ret + (insideComment ? strip(str.substr(offset)) : str.substr(offset)); +}; diff --git a/node_modules/strip-json-comments/license b/node_modules/strip-json-comments/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/strip-json-comments/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/strip-json-comments/package.json b/node_modules/strip-json-comments/package.json new file mode 100644 index 00000000..288ecc77 --- /dev/null +++ b/node_modules/strip-json-comments/package.json @@ -0,0 +1,42 @@ +{ + "name": "strip-json-comments", + "version": "2.0.1", + "description": "Strip comments from JSON. 
Lets you use comments in your JSON files!", + "license": "MIT", + "repository": "sindresorhus/strip-json-comments", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "json", + "strip", + "remove", + "delete", + "trim", + "comments", + "multiline", + "parse", + "config", + "configuration", + "conf", + "settings", + "util", + "env", + "environment" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/strip-json-comments/readme.md b/node_modules/strip-json-comments/readme.md new file mode 100644 index 00000000..0ee58dfe --- /dev/null +++ b/node_modules/strip-json-comments/readme.md @@ -0,0 +1,64 @@ +# strip-json-comments [![Build Status](https://travis-ci.org/sindresorhus/strip-json-comments.svg?branch=master)](https://travis-ci.org/sindresorhus/strip-json-comments) + +> Strip comments from JSON. Lets you use comments in your JSON files! + +This is now possible: + +```js +{ + // rainbows + "unicorn": /* ❤ */ "cake" +} +``` + +It will replace single-line comments `//` and multi-line comments `/**/` with whitespace. This allows JSON error positions to remain as close as possible to the original source. + +Also available as a [gulp](https://github.com/sindresorhus/gulp-strip-json-comments)/[grunt](https://github.com/sindresorhus/grunt-strip-json-comments)/[broccoli](https://github.com/sindresorhus/broccoli-strip-json-comments) plugin. + + +## Install + +``` +$ npm install --save strip-json-comments +``` + + +## Usage + +```js +const json = '{/*rainbows*/"unicorn":"cake"}'; + +JSON.parse(stripJsonComments(json)); +//=> {unicorn: 'cake'} +``` + + +## API + +### stripJsonComments(input, [options]) + +#### input + +Type: `string` + +Accepts a string with JSON and returns a string without comments. + +#### options + +##### whitespace + +Type: `boolean` +Default: `true` + +Replace comments with whitespace instead of stripping them entirely. + + +## Related + +- [strip-json-comments-cli](https://github.com/sindresorhus/strip-json-comments-cli) - CLI for this module +- [strip-css-comments](https://github.com/sindresorhus/strip-css-comments) - Strip comments from CSS + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/sucrase/CHANGELOG.md b/node_modules/sucrase/CHANGELOG.md new file mode 100644 index 00000000..763ed375 --- /dev/null +++ b/node_modules/sucrase/CHANGELOG.md @@ -0,0 +1,230 @@ +# 3.10.1 (2019-03-31) + +* Fix parsing of `a<b>c` in TypeScript. ([#438]) +* Add support for new TypeScript 3.4 syntax, other parser improvements. ([#439], [#440]) +* Elide TS `import =` statements that are only used as a type ([#441]) +* Properly handle async arrow functions with multiline type parameters ([#443]) + +# 3.10.0 (2019-03-11) + +* Fix bug where `/*/` was being parsed incorrectly. ([#430]) +* Properly parse and compile JSX spread children. ([#431]) +* Implement TypeScript export elision for exported types. ([#433]) + +# 3.9.6 (2019-03-01) + +* Fix Flow bug where `implements` caused the class name to be incorrectly recognized. ([#409]) +* Correctly handle `!:` in TS variable declarations. ([#410]) +* Move more import code into helper functions in prep for some upcoming changes. +* Fix bug where some JSX component names were incorrectly turned into strings.
([#425]) (Yang Zhang) + +# 3.9.5 (2019-01-13) + +* Fix bug when processing a declaration that looks like an export assignment. ([#402]) +* Fix TS import elision for JSX fragments and custom pragmas. ([#403]) +* Treat reserved words as invalid identifiers when handling enums. ([#405]) + +# 3.9.4 (2019-01-07) + +* Avoid false positive when detecting if a class has a superclass. ([#399]) + +# 3.9.3 (2019-01-06) + +* Fix syntax error on arrow functions with multiline return types. ([#393]) + +# 3.9.2 (2019-01-02) + +* Fix crash on optional arrow function params without type annotations. ([#389]) +* Usability bug fixes for website. ([#390]) + +# 3.9.1 (2018-12-31) + +* Fix react-hot-loader transform syntax error with some export styles. ([#384]) +* Fix website to properly show react-hot-loader Babel transform output. ([#386]) + +# 3.9.0 (2018-12-30) + +* Add a react-hot-loader transform. ([#376]) +* Add support for dynamic `import()` syntax in TS types. ([#380]) +* Many improvements to the website, including faster initial pageloads. +* Small performance improvements. + +# 3.8.1 (2018-12-03) + +* Fix infinite loop when a file ends with a short identifier ([#363]) +* Small perf improvements. + +# 3.8.0 (2018-11-25) + +* Various simplifications in prep for compiling the project with AssemblyScript. +* Performance improvements, varying from 10% to 70% better performance depending + on use case. +* Fix infinite loop in flow `declare module` parsing ([#359]) + +# 3.7.1 (2018-11-18) + +* Fix crash on empty export expressions ([#338]) +* Fix crash on TypeScript `declare global` ([#339]) +* Fix crash when using overloaded constructors in TypeScript ([#340]) +* Fix TypeScript import elision when imported names are shadowed by variables + ([#342]) +* Fix import name transform to work in code without semicolons ([#337]) + (Alec Larson) + +# 3.7.0 (2018-11-11) + +* Fix perf regression in TypeScript parsing ([#327]) +* Fix broken line numbers in syntax errors, improve parser backtracking + performance ([#331]) +* Add Parser features and bugfixes from the Babel parser, including TypeScript + 3.0 support ([#333]) + +# 3.6.0 (2018-10-29) + +* Add CLI support for jsx pragmas ([#321]) (Josiah Savary) +* Allow super.method() calls in constructor ([#324]) (Erik Arvidsson) + +# 3.5.0 (2018-09-30) + +* Change class field implementation to use initializer methods ([#313]) +* Update TypeScript and Flow support to include new language features recently + supported by Babel. ([#314], [#315], [#316]) +* Properly handle function name inference in named exports ([#317]) + +# 3.4.2 (2018-08-27) + +* Implement destructuring in export declarations ([#305]) +* Properly handle function name inference in named exports ([#308]) + +# 3.4.1 (2018-07-06) + +* Quote shorthand prop keys that contain a hyphen ([#292]) (Kevin Gao) +* Fix infinite loop on incomplete JSX. ([#296]) + +# 3.4.0 (2018-07-01) + +* Add a sucrase-node CLI that wraps node. ([#288]) +* Allow exported generator functions. ([#290]) + +# 3.3.0 (2018-06-28) + +* Add a --out-extension option to the CLI. ([#282]) +* Add a -q/--quiet option in the CLI and use it in the build script. ([#284]) +* Don't emit semicolons in class bodies. ([#285]) +* Fix ugly emitted comments when removing code between tokens. ([#286]) + +# 3.2.1 (2018-06-27) + +* Allow TS type parameters on object member methods. ([#276]) +* Simplify identity source map generator. ([#265]) +* Fix crash on destructured params in arrow function types. 
([#278]) +* Remove @flow directives from comments when the flow transform is enabled. + ([#279]) + +# 3.2.0 (2018-06-25) + +* Fix crash when using JSX elements as props. ([#268]) (Erik Arvidsson) +* Fix incorrect compilation of TypeScript optional class properties with an + initializer. ([#264]) +* Fix crash on class fields that don't end in a semicolon. ([#271]) +* Allow trailing commas after rest elements. ([#272]) +* Don't crash on class bodies with an index signature. ([#273]) +* Allow member expression identifiers when determining React displayName. + ([#274]) +* Add production option and use it for JSX. ([#270]) (Erik Arvidsson) +* Fix off-by-one error in parsing JSX fragments. ([#275]) + +# 3.1.0 (2018-06-18) + +* Add basic support for source maps ([#257], [#261]) + +# 3.0.1 (2018-06-11) + +* Fix crash in `getVersion`. + +# 3.0.0 (2018-06-10) + +### Breaking Changes + +* `transform` now returns an object ([#244]). You now should write + `transform(...).code` instead of just `transform(...)`. `code` is the only + property for now, but this allows Sucrase to return source maps and possibly + other values. +* The package's `dist` folder has been restructured, so direct internal module + imports may break. + +### Other changes + +* Overhaul build system to use Sucrase for everything ([#243]) +* Omit import helpers when unused ([#237]) (Alec Larson) +* Fix files accidentally included in final package ([#233]) +* Various refactors and performance improvements. + +# 2.2.0 (2018-05-19) + +* Add support for JSX fragment syntax. +* Add support for custom JSX pragmas rather than defaulting to + `React.createElement` and `React.Fragment`. + +[#233]: https://github.com/alangpierce/sucrase/pull/233 +[#237]: https://github.com/alangpierce/sucrase/pull/237 +[#243]: https://github.com/alangpierce/sucrase/pull/243 +[#244]: https://github.com/alangpierce/sucrase/pull/244 +[#257]: https://github.com/alangpierce/sucrase/pull/257 +[#261]: https://github.com/alangpierce/sucrase/pull/261 +[#264]: https://github.com/alangpierce/sucrase/pull/264 +[#265]: https://github.com/alangpierce/sucrase/pull/265 +[#268]: https://github.com/alangpierce/sucrase/pull/268 +[#270]: https://github.com/alangpierce/sucrase/pull/270 +[#271]: https://github.com/alangpierce/sucrase/pull/271 +[#272]: https://github.com/alangpierce/sucrase/pull/272 +[#273]: https://github.com/alangpierce/sucrase/pull/273 +[#274]: https://github.com/alangpierce/sucrase/pull/274 +[#275]: https://github.com/alangpierce/sucrase/pull/275 +[#276]: https://github.com/alangpierce/sucrase/pull/276 +[#278]: https://github.com/alangpierce/sucrase/pull/278 +[#279]: https://github.com/alangpierce/sucrase/pull/279 +[#282]: https://github.com/alangpierce/sucrase/pull/282 +[#284]: https://github.com/alangpierce/sucrase/pull/284 +[#285]: https://github.com/alangpierce/sucrase/pull/285 +[#286]: https://github.com/alangpierce/sucrase/pull/286 +[#288]: https://github.com/alangpierce/sucrase/pull/288 +[#290]: https://github.com/alangpierce/sucrase/pull/290 +[#292]: https://github.com/alangpierce/sucrase/pull/292 +[#296]: https://github.com/alangpierce/sucrase/pull/296 +[#305]: https://github.com/alangpierce/sucrase/pull/305 +[#308]: https://github.com/alangpierce/sucrase/pull/308 +[#313]: https://github.com/alangpierce/sucrase/pull/313 +[#314]: https://github.com/alangpierce/sucrase/pull/314 +[#315]: https://github.com/alangpierce/sucrase/pull/315 +[#316]: https://github.com/alangpierce/sucrase/pull/316 +[#317]: https://github.com/alangpierce/sucrase/pull/317 +[#321]: 
https://github.com/alangpierce/sucrase/pull/321 +[#324]: https://github.com/alangpierce/sucrase/pull/324 +[#327]: https://github.com/alangpierce/sucrase/pull/327 +[#331]: https://github.com/alangpierce/sucrase/pull/331 +[#333]: https://github.com/alangpierce/sucrase/pull/333 +[#337]: https://github.com/alangpierce/sucrase/pull/337 +[#338]: https://github.com/alangpierce/sucrase/pull/338 +[#339]: https://github.com/alangpierce/sucrase/pull/339 +[#340]: https://github.com/alangpierce/sucrase/pull/340 +[#342]: https://github.com/alangpierce/sucrase/pull/342 +[#359]: https://github.com/alangpierce/sucrase/pull/359 +[#363]: https://github.com/alangpierce/sucrase/pull/363 +[#376]: https://github.com/alangpierce/sucrase/pull/376 +[#380]: https://github.com/alangpierce/sucrase/pull/380 +[#384]: https://github.com/alangpierce/sucrase/pull/384 +[#386]: https://github.com/alangpierce/sucrase/pull/386 +[#389]: https://github.com/alangpierce/sucrase/pull/389 +[#390]: https://github.com/alangpierce/sucrase/pull/390 +[#393]: https://github.com/alangpierce/sucrase/pull/393 +[#399]: https://github.com/alangpierce/sucrase/pull/399 +[#402]: https://github.com/alangpierce/sucrase/pull/402 +[#403]: https://github.com/alangpierce/sucrase/pull/403 +[#405]: https://github.com/alangpierce/sucrase/pull/405 +[#409]: https://github.com/alangpierce/sucrase/pull/409 +[#410]: https://github.com/alangpierce/sucrase/pull/410 +[#425]: https://github.com/alangpierce/sucrase/pull/425 +[#430]: https://github.com/alangpierce/sucrase/pull/430 +[#431]: https://github.com/alangpierce/sucrase/pull/431 +[#433]: https://github.com/alangpierce/sucrase/pull/433 diff --git a/node_modules/sucrase/LICENSE b/node_modules/sucrase/LICENSE new file mode 100644 index 00000000..06d77d6a --- /dev/null +++ b/node_modules/sucrase/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2012-2018 various contributors (see AUTHORS) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/sucrase/README.md b/node_modules/sucrase/README.md new file mode 100644 index 00000000..a456ce16 --- /dev/null +++ b/node_modules/sucrase/README.md @@ -0,0 +1,230 @@ +# Sucrase + +[![Build Status](https://travis-ci.org/alangpierce/sucrase.svg?branch=master)](https://travis-ci.org/alangpierce/sucrase) +[![npm version](https://img.shields.io/npm/v/sucrase.svg)](https://www.npmjs.com/package/sucrase) +[![Install Size](https://packagephobia.now.sh/badge?p=sucrase)](https://packagephobia.now.sh/result?p=sucrase) +[![MIT License](https://img.shields.io/npm/l/express.svg?maxAge=2592000)](LICENSE) +[![Join the chat at https://gitter.im/sucrasejs](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/sucrasejs/Lobby) + +### [Try it out](https://sucrase.io) + +Sucrase is an alternative to Babel that allows super-fast development builds. +Instead of compiling a large range of JS features to be able to work in Internet +Explorer, Sucrase assumes that you're developing with a recent browser or recent +Node.js version, so it focuses on compiling non-standard language extensions: +JSX, TypeScript, and Flow. Because of this smaller scope, Sucrase can get away +with an architecture that is much more performant but less extensible and +maintainable. Sucrase's parser is forked from Babel's parser (so Sucrase is +indebted to Babel and wouldn't be possible without it) and trims it down to a +focused subset of what Babel solves. If it fits your use case, hopefully Sucrase +can speed up your development experience! + +**Sucrase has been extensively tested.** It can successfully build +the [Benchling](https://benchling.com/) frontend code, +[Babel](https://github.com/babel/babel), +[React](https://github.com/facebook/react), +[TSLint](https://github.com/palantir/tslint), +[Apollo client](https://github.com/apollographql/apollo-client), and +[decaffeinate](https://github.com/decaffeinate/decaffeinate) +with all tests passing, about 1 million lines of code total. + +**Sucrase is about 20x faster than Babel.** Here's one measurement of how Sucrase +compares with other tools on a large TypeScript codebase with 4045 files and +661081 lines of code: +``` + Time Speed +Sucrase 2.928s 225752 lines per second +swc 13.782s 47966 lines per second +TypeScript 39.603s 16693 lines per second +Babel 52.598s 12569 lines per second +``` + +## Transforms + +The main configuration option in Sucrase is an array of transform names. These +transforms are available: +* **jsx**: Transforms JSX syntax to `React.createElement`, e.g. `
<div a={b} />` + becomes `React.createElement('div', {a: b})`. Behaves like Babel 7's + [React preset](https://github.com/babel/babel/tree/master/packages/babel-preset-react), + including adding `createReactClass` display names and JSX context information. +* **typescript**: Compiles TypeScript code to JavaScript, removing type + annotations and handling features like enums. Does not check types. Sucrase + transforms each file independently, so you should enable the `isolatedModules` + TypeScript flag so that the typechecker will disallow the few features like + `const enum`s that need cross-file compilation. +* **flow**: Removes Flow type annotations. Does not check types. +* **imports**: Transforms ES Modules (`import`/`export`) to CommonJS + (`require`/`module.exports`) using the same approach as Babel and TypeScript + with `--esModuleInterop`. Also includes dynamic `import`. +* **react-hot-loader**: Performs the equivalent of the `react-hot-loader/babel` + transform in the [react-hot-loader](https://github.com/gaearon/react-hot-loader) + project. This enables advanced hot reloading use cases such as editing of + bound methods. + +These proposed JS features are built-in and always transformed: +* [Class fields](https://github.com/tc39/proposal-class-fields): `class C { x = 1; }`. + This includes static fields but not the `#x` private field syntax. +* [Export namespace syntax](https://github.com/tc39/proposal-export-ns-from): + `export * as a from 'a';` +* [Numeric separators](https://github.com/tc39/proposal-numeric-separator): + `const n = 1_234;` +* [Optional catch binding](https://github.com/tc39/proposal-optional-catch-binding): + `try { doThing(); } catch { }`. + +### Unsupported syntax + +All JS syntax not mentioned above will "pass through" and needs to be supported +by your JS runtime. For example: +* Decorators, private fields, `throw` expressions, optional chaining, generator + arrow functions, and `do` expressions are all unsupported in browsers and Node + (as of this writing), and Sucrase doesn't make an attempt to transpile them. +* Object rest/spread, async functions, and async iterators are all recent + features that should work fine, but might cause issues if you use older + versions of tools like webpack. BigInt and newer regex features may or may not + work, based on your tooling. + +### JSX Options +Like Babel, Sucrase compiles JSX to React functions by default, but can be +configured for any JSX use case. +* **jsxPragma**: Element creation function, defaults to `React.createElement`. +* **jsxFragmentPragma**: Fragment component, defaults to `React.Fragment`. + +### Legacy CommonJS interop +Two legacy modes can be used with the `import` transform: +* **enableLegacyTypeScriptModuleInterop**: Use the default TypeScript approach + to CommonJS interop instead of assuming that TypeScript's `--esModuleInterop` + flag is enabled. For example, if a CJS module exports a function, legacy + TypeScript interop requires you to write `import * as add from './add';`, + while Babel, Webpack, Node.js, and TypeScript with `--esModuleInterop` require + you to write `import add from './add';`. As mentioned in the + [docs](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-7.html#support-for-import-d-from-cjs-form-commonjs-modules-with---esmoduleinterop), + the TypeScript team recommends you always use `--esModuleInterop`.
+* **enableLegacyBabel5ModuleInterop**: Use the Babel 5 approach to CommonJS + interop, so that you can run `require('./MyModule')` instead of + `require('./MyModule').default`. Analogous to + [babel-plugin-add-module-exports](https://github.com/59naga/babel-plugin-add-module-exports). + +## Usage + +Installation: + +``` +yarn add --dev sucrase # Or npm install --save-dev sucrase +``` + +Often, you'll want to use one of the build tool integrations: +[Webpack](https://github.com/alangpierce/sucrase/tree/master/integrations/webpack-loader), +[Gulp](https://github.com/alangpierce/sucrase/tree/master/integrations/gulp-plugin), +[Jest](https://github.com/alangpierce/sucrase/tree/master/integrations/jest-plugin), +[Rollup](https://github.com/rollup/rollup-plugin-sucrase), +[Broccoli](https://github.com/stefanpenner/broccoli-sucrase). + +Compile on-the-fly via a require hook with some [reasonable defaults](src/register.ts): + +```js +// Register just one extension. +require("sucrase/register/ts"); +// Or register all at once. +require("sucrase/register"); +``` + +Compile on-the-fly via a drop-in replacement for node: + +``` +sucrase-node index.ts +``` + +Run on a directory: + +``` +sucrase ./srcDir -d ./outDir --transforms typescript,imports +``` + +Call from JS directly: + +```js +import {transform} from "sucrase"; +const compiledCode = transform(code, {transforms: ["typescript", "imports"]}).code; +``` + +## What Sucrase is not + +Sucrase is intended to be useful for the most common cases, but it does not aim +to have nearly the scope and versatility of Babel. Some specific examples: + +* Sucrase does not check your code for errors. Sucrase's contract is that if you + give it valid code, it will produce valid JS code. If you give it invalid + code, it might produce invalid code, it might produce valid code, or it might + give an error. Always use Sucrase with a linter or typechecker, which is more + suited for error-checking. +* Sucrase is not pluginizable. With the current architecture, transforms need to + be explicitly written to cooperate with each other, so each additional + transform takes significant extra work. +* Sucrase is not good for prototyping language extensions and upcoming language + features. Its faster architecture makes new transforms more difficult to write + and more fragile. +* Sucrase will never produce code for old browsers like IE. Compiling code down + to ES5 is much more complicated than any transformation that Sucrase needs to + do. +* Sucrase is hesitant to implement upcoming JS features, although some of them + make sense to implement for pragmatic reasons. Its main focus is on language + extensions (JSX, TypeScript, Flow) that will never be supported by JS + runtimes. +* Like Babel, Sucrase is not a typechecker, and must process each file in + isolation. For example, TypeScript `const enum`s are treated as regular + `enum`s rather than inlining across files. +* You should think carefully before using Sucrase in production. Sucrase is + mostly beneficial in development, and in many cases, Babel or tsc will be more + suitable for production builds. + +See the [Project Vision](./docs/PROJECT_VISION.md) document for more details on +the philosophy behind Sucrase. + +## Motivation + +As JavaScript implementations mature, it becomes more and more reasonable to +disable Babel transforms, especially in development when you know that you're +targeting a modern runtime. 
You might hope that you could simplify and speed up +the build step by eventually disabling Babel entirely, but this isn't possible +if you're using a non-standard language extension like JSX, TypeScript, or Flow. +Unfortunately, disabling most transforms in Babel doesn't speed it up as much as +you might expect. To understand, let's take a look at how Babel works: + +1. Tokenize the input source code into a token stream. +2. Parse the token stream into an AST. +3. Walk the AST to compute the scope information for each variable. +4. Apply all transform plugins in a single traversal, resulting in a new AST. +5. Print the resulting AST. + +Only step 4 gets faster when disabling plugins, so there's always a fixed cost +to running Babel regardless of how many transforms are enabled. + +Sucrase bypasses most of these steps, and works like this: +1. Tokenize the input source code into a token stream using a trimmed-down fork + of the Babel parser. This fork does not produce a full AST, but still + produces meaningful token metadata specifically designed for the later + transforms. +2. Scan through the tokens, computing preliminary information like all + imported/exported names. +3. Run the transform by doing a pass through the tokens and performing a number + of careful find-and-replace operations, like replacing `<Foo` with + `React.createElement(Foo`. diff --git a/node_modules/sucrase/dist/CJSImportProcessor.js b/node_modules/sucrase/dist/CJSImportProcessor.js new file mode 100644 index 00000000..f5d78078 --- /dev/null +++ b/node_modules/sucrase/dist/CJSImportProcessor.js @@ -0,0 +1,461 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _HelperManager = require('./HelperManager'); + + +var _tokenizer = require('./parser/tokenizer'); +var _keywords = require('./parser/tokenizer/keywords'); +var _types = require('./parser/tokenizer/types'); + +var _getNonTypeIdentifiers = require('./util/getNonTypeIdentifiers'); + + + + + + + + + + + + + + + + +/** + * Class responsible for preprocessing and bookkeeping import and export declarations within the + * file. + * + * TypeScript uses a simpler mechanism that does not use functions like interopRequireDefault and + * interopRequireWildcard, so we also allow that mode for compatibility.
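 *
 * Illustrative sketch (editorial example, not part of the upstream file): for
 * `import React, {useState} from 'react';` this class generates replacement code roughly like
 * `var _react = require('react'); var _react2 = _interopRequireDefault(_react);` (the helper and
 * variable names here are assumed for illustration) and records identifier rewrites of
 * `React` to `_react2.default` and `useState` to `_react.useState`.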
+ */ + class CJSImportProcessor { + __init() {this.nonTypeIdentifiers = new Set()} + __init2() {this.importInfoByPath = new Map()} + __init3() {this.importsToReplace = new Map()} + __init4() {this.identifierReplacements = new Map()} + __init5() {this.exportBindingsByLocalName = new Map()} + + + constructor( + nameManager, + tokens, + enableLegacyTypeScriptModuleInterop, + options, + isTypeScriptTransformEnabled, + ) {;this.nameManager = nameManager;this.tokens = tokens;this.enableLegacyTypeScriptModuleInterop = enableLegacyTypeScriptModuleInterop;this.options = options;this.isTypeScriptTransformEnabled = isTypeScriptTransformEnabled;CJSImportProcessor.prototype.__init.call(this);CJSImportProcessor.prototype.__init2.call(this);CJSImportProcessor.prototype.__init3.call(this);CJSImportProcessor.prototype.__init4.call(this);CJSImportProcessor.prototype.__init5.call(this); + this.helpers = new (0, _HelperManager.HelperManager)(nameManager); + } + + getPrefixCode() { + return this.helpers.emitHelpers(); + } + + preprocessTokens() { + for (let i = 0; i < this.tokens.tokens.length; i++) { + if ( + this.tokens.matches1AtIndex(i, _types.TokenType._import) && + !this.tokens.matches3AtIndex(i, _types.TokenType._import, _types.TokenType.name, _types.TokenType.eq) + ) { + this.preprocessImportAtIndex(i); + } + if ( + this.tokens.matches1AtIndex(i, _types.TokenType._export) && + !this.tokens.matches2AtIndex(i, _types.TokenType._export, _types.TokenType.eq) + ) { + this.preprocessExportAtIndex(i); + } + } + this.generateImportReplacements(); + } + + /** + * In TypeScript, import statements that only import types should be removed. This does not count + * bare imports. + */ + pruneTypeOnlyImports() { + this.nonTypeIdentifiers = _getNonTypeIdentifiers.getNonTypeIdentifiers.call(void 0, this.tokens, this.options); + for (const [path, importInfo] of this.importInfoByPath.entries()) { + if ( + importInfo.hasBareImport || + importInfo.hasStarExport || + importInfo.exportStarNames.length > 0 || + importInfo.namedExports.length > 0 + ) { + continue; + } + const names = [ + ...importInfo.defaultNames, + ...importInfo.wildcardNames, + ...importInfo.namedImports.map(({localName}) => localName), + ]; + if (names.every((name) => this.isTypeName(name))) { + this.importsToReplace.set(path, ""); + } + } + } + + isTypeName(name) { + return this.isTypeScriptTransformEnabled && !this.nonTypeIdentifiers.has(name); + } + + generateImportReplacements() { + for (const [path, importInfo] of this.importInfoByPath.entries()) { + const { + defaultNames, + wildcardNames, + namedImports, + namedExports, + exportStarNames, + hasStarExport, + } = importInfo; + + if ( + defaultNames.length === 0 && + wildcardNames.length === 0 && + namedImports.length === 0 && + namedExports.length === 0 && + exportStarNames.length === 0 && + !hasStarExport + ) { + // Import is never used, so don't even assign a name. + this.importsToReplace.set(path, `require('${path}');`); + continue; + } + + const primaryImportName = this.getFreeIdentifierForPath(path); + let secondaryImportName; + if (this.enableLegacyTypeScriptModuleInterop) { + secondaryImportName = primaryImportName; + } else { + secondaryImportName = + wildcardNames.length > 0 ? wildcardNames[0] : this.getFreeIdentifierForPath(path); + } + let requireCode = `var ${primaryImportName} = require('${path}');`; + if (wildcardNames.length > 0) { + for (const wildcardName of wildcardNames) { + const moduleExpr = this.enableLegacyTypeScriptModuleInterop + ? 
primaryImportName + : `${this.helpers.getHelperName("interopRequireWildcard")}(${primaryImportName})`; + requireCode += ` var ${wildcardName} = ${moduleExpr};`; + } + } else if (exportStarNames.length > 0 && secondaryImportName !== primaryImportName) { + requireCode += ` var ${secondaryImportName} = ${this.helpers.getHelperName( + "interopRequireWildcard", + )}(${primaryImportName});`; + } else if (defaultNames.length > 0 && secondaryImportName !== primaryImportName) { + requireCode += ` var ${secondaryImportName} = ${this.helpers.getHelperName( + "interopRequireDefault", + )}(${primaryImportName});`; + } + + for (const {importedName, localName} of namedExports) { + requireCode += ` ${this.helpers.getHelperName( + "createNamedExportFrom", + )}(${primaryImportName}, '${localName}', '${importedName}');`; + } + for (const exportStarName of exportStarNames) { + requireCode += ` exports.${exportStarName} = ${secondaryImportName};`; + } + if (hasStarExport) { + requireCode += ` ${this.helpers.getHelperName("createStarExport")}(${primaryImportName});`; + } + + this.importsToReplace.set(path, requireCode); + + for (const defaultName of defaultNames) { + this.identifierReplacements.set(defaultName, `${secondaryImportName}.default`); + } + for (const {importedName, localName} of namedImports) { + this.identifierReplacements.set(localName, `${primaryImportName}.${importedName}`); + } + } + } + + getFreeIdentifierForPath(path) { + const components = path.split("/"); + const lastComponent = components[components.length - 1]; + const baseName = lastComponent.replace(/\W/g, ""); + return this.nameManager.claimFreeName(`_${baseName}`); + } + + preprocessImportAtIndex(index) { + const defaultNames = []; + const wildcardNames = []; + let namedImports = []; + + index++; + if ( + (this.tokens.matchesContextualAtIndex(index, _keywords.ContextualKeyword._type) || + this.tokens.matches1AtIndex(index, _types.TokenType._typeof)) && + !this.tokens.matches1AtIndex(index + 1, _types.TokenType.comma) && + !this.tokens.matchesContextualAtIndex(index + 1, _keywords.ContextualKeyword._from) + ) { + // import type declaration, so no need to process anything. 
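      // (Editorial example, not from the upstream source: e.g. `import type {Foo} from './foo';`
      // or Flow's `import typeof Bar from './bar';` is skipped here rather than recorded.)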
+ return; + } + + if (this.tokens.matches1AtIndex(index, _types.TokenType.parenL)) { + // Dynamic import, so nothing to do + return; + } + + if (this.tokens.matches1AtIndex(index, _types.TokenType.name)) { + defaultNames.push(this.tokens.identifierNameAtIndex(index)); + index++; + if (this.tokens.matches1AtIndex(index, _types.TokenType.comma)) { + index++; + } + } + + if (this.tokens.matches1AtIndex(index, _types.TokenType.star)) { + // * as + index += 2; + wildcardNames.push(this.tokens.identifierNameAtIndex(index)); + index++; + } + + if (this.tokens.matches1AtIndex(index, _types.TokenType.braceL)) { + index++; + ({newIndex: index, namedImports} = this.getNamedImports(index)); + } + + if (this.tokens.matchesContextualAtIndex(index, _keywords.ContextualKeyword._from)) { + index++; + } + + if (!this.tokens.matches1AtIndex(index, _types.TokenType.string)) { + throw new Error("Expected string token at the end of import statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + importInfo.defaultNames.push(...defaultNames); + importInfo.wildcardNames.push(...wildcardNames); + importInfo.namedImports.push(...namedImports); + if (defaultNames.length === 0 && wildcardNames.length === 0 && namedImports.length === 0) { + importInfo.hasBareImport = true; + } + } + + preprocessExportAtIndex(index) { + if ( + this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType._var) || + this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType._let) || + this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType._const) + ) { + this.preprocessVarExportAtIndex(index); + } else if ( + this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType._function) || + this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType._class) + ) { + const exportName = this.tokens.identifierNameAtIndex(index + 2); + this.addExportBinding(exportName, exportName); + } else if (this.tokens.matches3AtIndex(index, _types.TokenType._export, _types.TokenType.name, _types.TokenType._function)) { + const exportName = this.tokens.identifierNameAtIndex(index + 3); + this.addExportBinding(exportName, exportName); + } else if (this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType.braceL)) { + this.preprocessNamedExportAtIndex(index); + } else if (this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType.star)) { + this.preprocessExportStarAtIndex(index); + } + } + + preprocessVarExportAtIndex(index) { + let depth = 0; + // Handle cases like `export let {x} = y;`, starting at the open-brace in that case. 
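    // (Editorial note, not from the upstream source: each declared name found below, e.g. `x`,
    // is registered so that later uses of it are rewritten to `exports.x`.)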
+ for (let i = index + 2; ; i++) { + if ( + this.tokens.matches1AtIndex(i, _types.TokenType.braceL) || + this.tokens.matches1AtIndex(i, _types.TokenType.dollarBraceL) || + this.tokens.matches1AtIndex(i, _types.TokenType.bracketL) + ) { + depth++; + } else if ( + this.tokens.matches1AtIndex(i, _types.TokenType.braceR) || + this.tokens.matches1AtIndex(i, _types.TokenType.bracketR) + ) { + depth--; + } else if (depth === 0 && !this.tokens.matches1AtIndex(i, _types.TokenType.name)) { + break; + } else if (this.tokens.matches1AtIndex(1, _types.TokenType.eq)) { + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + i = endIndex - 1; + } else { + const token = this.tokens.tokens[i]; + if (_tokenizer.isDeclaration.call(void 0, token)) { + const exportName = this.tokens.identifierNameAtIndex(i); + this.identifierReplacements.set(exportName, `exports.${exportName}`); + } + } + } + } + + /** + * Walk this export statement just in case it's an export...from statement. + * If it is, combine it into the import info for that path. Otherwise, just + * bail out; it'll be handled later. + */ + preprocessNamedExportAtIndex(index) { + // export { + index += 2; + const {newIndex, namedImports} = this.getNamedImports(index); + index = newIndex; + + if (this.tokens.matchesContextualAtIndex(index, _keywords.ContextualKeyword._from)) { + index++; + } else { + // Reinterpret "a as b" to be local/exported rather than imported/local. + for (const {importedName: localName, localName: exportedName} of namedImports) { + this.addExportBinding(localName, exportedName); + } + return; + } + + if (!this.tokens.matches1AtIndex(index, _types.TokenType.string)) { + throw new Error("Expected string token at the end of import statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + importInfo.namedExports.push(...namedImports); + } + + preprocessExportStarAtIndex(index) { + let exportedName = null; + if (this.tokens.matches3AtIndex(index, _types.TokenType._export, _types.TokenType.star, _types.TokenType._as)) { + // export * as + index += 3; + exportedName = this.tokens.identifierNameAtIndex(index); + // foo from + index += 2; + } else { + // export * from + index += 3; + } + if (!this.tokens.matches1AtIndex(index, _types.TokenType.string)) { + throw new Error("Expected string token at the end of star export statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + if (exportedName !== null) { + importInfo.exportStarNames.push(exportedName); + } else { + importInfo.hasStarExport = true; + } + } + + getNamedImports(index) { + const namedImports = []; + while (true) { + if (this.tokens.matches1AtIndex(index, _types.TokenType.braceR)) { + index++; + break; + } + + // Flow type imports should just be ignored. 
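      // (Illustrative example, not from the upstream source: in `import {type Foo, bar} from './x';`
      // only `bar` is recorded as a named import.)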
+ let isTypeImport = false; + if ( + (this.tokens.matchesContextualAtIndex(index, _keywords.ContextualKeyword._type) || + this.tokens.matches1AtIndex(index, _types.TokenType._typeof)) && + this.tokens.matches1AtIndex(index + 1, _types.TokenType.name) && + !this.tokens.matchesContextualAtIndex(index + 1, _keywords.ContextualKeyword._as) + ) { + isTypeImport = true; + index++; + } + + const importedName = this.tokens.identifierNameAtIndex(index); + let localName; + index++; + if (this.tokens.matchesContextualAtIndex(index, _keywords.ContextualKeyword._as)) { + index++; + localName = this.tokens.identifierNameAtIndex(index); + index++; + } else { + localName = importedName; + } + if (!isTypeImport) { + namedImports.push({importedName, localName}); + } + if (this.tokens.matches2AtIndex(index, _types.TokenType.comma, _types.TokenType.braceR)) { + index += 2; + break; + } else if (this.tokens.matches1AtIndex(index, _types.TokenType.braceR)) { + index++; + break; + } else if (this.tokens.matches1AtIndex(index, _types.TokenType.comma)) { + index++; + } else { + throw new Error(`Unexpected token: ${JSON.stringify(this.tokens.tokens[index])}`); + } + } + return {newIndex: index, namedImports}; + } + + /** + * Get a mutable import info object for this path, creating one if it doesn't + * exist yet. + */ + getImportInfo(path) { + const existingInfo = this.importInfoByPath.get(path); + if (existingInfo) { + return existingInfo; + } + const newInfo = { + defaultNames: [], + wildcardNames: [], + namedImports: [], + namedExports: [], + hasBareImport: false, + exportStarNames: [], + hasStarExport: false, + }; + this.importInfoByPath.set(path, newInfo); + return newInfo; + } + + addExportBinding(localName, exportedName) { + if (!this.exportBindingsByLocalName.has(localName)) { + this.exportBindingsByLocalName.set(localName, []); + } + this.exportBindingsByLocalName.get(localName).push(exportedName); + } + + /** + * Return the code to use for the import for this path, or the empty string if + * the code has already been "claimed" by a previous import. + */ + claimImportCode(importPath) { + const result = this.importsToReplace.get(importPath); + this.importsToReplace.set(importPath, ""); + return result || ""; + } + + getIdentifierReplacement(identifierName) { + return this.identifierReplacements.get(identifierName) || null; + } + + /** + * Return a string like `exports.foo = exports.bar`. + */ + resolveExportBinding(assignedName) { + const exportedNames = this.exportBindingsByLocalName.get(assignedName); + if (!exportedNames || exportedNames.length === 0) { + return null; + } + return exportedNames.map((exportedName) => `exports.${exportedName}`).join(" = "); + } + + /** + * Return all imported/exported names where we might be interested in whether usages of those + * names are shadowed. 
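   * (Illustrative example, not from the upstream source: for `import a from './a'; export const b = 1;`
   * this returns a set containing `a` and `b`.)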
+ */ + getGlobalNames() { + return new Set([ + ...this.identifierReplacements.keys(), + ...this.exportBindingsByLocalName.keys(), + ]); + } +} exports.default = CJSImportProcessor; diff --git a/node_modules/sucrase/dist/CJSImportProcessor.mjs b/node_modules/sucrase/dist/CJSImportProcessor.mjs new file mode 100644 index 00000000..0830b27c --- /dev/null +++ b/node_modules/sucrase/dist/CJSImportProcessor.mjs @@ -0,0 +1,461 @@ +import {HelperManager} from "./HelperManager"; + + +import {isDeclaration} from "./parser/tokenizer"; +import {ContextualKeyword} from "./parser/tokenizer/keywords"; +import {TokenType as tt} from "./parser/tokenizer/types"; + +import {getNonTypeIdentifiers} from "./util/getNonTypeIdentifiers"; + + + + + + + + + + + + + + + + +/** + * Class responsible for preprocessing and bookkeeping import and export declarations within the + * file. + * + * TypeScript uses a simpler mechanism that does not use functions like interopRequireDefault and + * interopRequireWildcard, so we also allow that mode for compatibility. + */ +export default class CJSImportProcessor { + __init() {this.nonTypeIdentifiers = new Set()} + __init2() {this.importInfoByPath = new Map()} + __init3() {this.importsToReplace = new Map()} + __init4() {this.identifierReplacements = new Map()} + __init5() {this.exportBindingsByLocalName = new Map()} + + + constructor( + nameManager, + tokens, + enableLegacyTypeScriptModuleInterop, + options, + isTypeScriptTransformEnabled, + ) {;this.nameManager = nameManager;this.tokens = tokens;this.enableLegacyTypeScriptModuleInterop = enableLegacyTypeScriptModuleInterop;this.options = options;this.isTypeScriptTransformEnabled = isTypeScriptTransformEnabled;CJSImportProcessor.prototype.__init.call(this);CJSImportProcessor.prototype.__init2.call(this);CJSImportProcessor.prototype.__init3.call(this);CJSImportProcessor.prototype.__init4.call(this);CJSImportProcessor.prototype.__init5.call(this); + this.helpers = new HelperManager(nameManager); + } + + getPrefixCode() { + return this.helpers.emitHelpers(); + } + + preprocessTokens() { + for (let i = 0; i < this.tokens.tokens.length; i++) { + if ( + this.tokens.matches1AtIndex(i, tt._import) && + !this.tokens.matches3AtIndex(i, tt._import, tt.name, tt.eq) + ) { + this.preprocessImportAtIndex(i); + } + if ( + this.tokens.matches1AtIndex(i, tt._export) && + !this.tokens.matches2AtIndex(i, tt._export, tt.eq) + ) { + this.preprocessExportAtIndex(i); + } + } + this.generateImportReplacements(); + } + + /** + * In TypeScript, import statements that only import types should be removed. This does not count + * bare imports. 
+ */ + pruneTypeOnlyImports() { + this.nonTypeIdentifiers = getNonTypeIdentifiers(this.tokens, this.options); + for (const [path, importInfo] of this.importInfoByPath.entries()) { + if ( + importInfo.hasBareImport || + importInfo.hasStarExport || + importInfo.exportStarNames.length > 0 || + importInfo.namedExports.length > 0 + ) { + continue; + } + const names = [ + ...importInfo.defaultNames, + ...importInfo.wildcardNames, + ...importInfo.namedImports.map(({localName}) => localName), + ]; + if (names.every((name) => this.isTypeName(name))) { + this.importsToReplace.set(path, ""); + } + } + } + + isTypeName(name) { + return this.isTypeScriptTransformEnabled && !this.nonTypeIdentifiers.has(name); + } + + generateImportReplacements() { + for (const [path, importInfo] of this.importInfoByPath.entries()) { + const { + defaultNames, + wildcardNames, + namedImports, + namedExports, + exportStarNames, + hasStarExport, + } = importInfo; + + if ( + defaultNames.length === 0 && + wildcardNames.length === 0 && + namedImports.length === 0 && + namedExports.length === 0 && + exportStarNames.length === 0 && + !hasStarExport + ) { + // Import is never used, so don't even assign a name. + this.importsToReplace.set(path, `require('${path}');`); + continue; + } + + const primaryImportName = this.getFreeIdentifierForPath(path); + let secondaryImportName; + if (this.enableLegacyTypeScriptModuleInterop) { + secondaryImportName = primaryImportName; + } else { + secondaryImportName = + wildcardNames.length > 0 ? wildcardNames[0] : this.getFreeIdentifierForPath(path); + } + let requireCode = `var ${primaryImportName} = require('${path}');`; + if (wildcardNames.length > 0) { + for (const wildcardName of wildcardNames) { + const moduleExpr = this.enableLegacyTypeScriptModuleInterop + ? 
primaryImportName + : `${this.helpers.getHelperName("interopRequireWildcard")}(${primaryImportName})`; + requireCode += ` var ${wildcardName} = ${moduleExpr};`; + } + } else if (exportStarNames.length > 0 && secondaryImportName !== primaryImportName) { + requireCode += ` var ${secondaryImportName} = ${this.helpers.getHelperName( + "interopRequireWildcard", + )}(${primaryImportName});`; + } else if (defaultNames.length > 0 && secondaryImportName !== primaryImportName) { + requireCode += ` var ${secondaryImportName} = ${this.helpers.getHelperName( + "interopRequireDefault", + )}(${primaryImportName});`; + } + + for (const {importedName, localName} of namedExports) { + requireCode += ` ${this.helpers.getHelperName( + "createNamedExportFrom", + )}(${primaryImportName}, '${localName}', '${importedName}');`; + } + for (const exportStarName of exportStarNames) { + requireCode += ` exports.${exportStarName} = ${secondaryImportName};`; + } + if (hasStarExport) { + requireCode += ` ${this.helpers.getHelperName("createStarExport")}(${primaryImportName});`; + } + + this.importsToReplace.set(path, requireCode); + + for (const defaultName of defaultNames) { + this.identifierReplacements.set(defaultName, `${secondaryImportName}.default`); + } + for (const {importedName, localName} of namedImports) { + this.identifierReplacements.set(localName, `${primaryImportName}.${importedName}`); + } + } + } + + getFreeIdentifierForPath(path) { + const components = path.split("/"); + const lastComponent = components[components.length - 1]; + const baseName = lastComponent.replace(/\W/g, ""); + return this.nameManager.claimFreeName(`_${baseName}`); + } + + preprocessImportAtIndex(index) { + const defaultNames = []; + const wildcardNames = []; + let namedImports = []; + + index++; + if ( + (this.tokens.matchesContextualAtIndex(index, ContextualKeyword._type) || + this.tokens.matches1AtIndex(index, tt._typeof)) && + !this.tokens.matches1AtIndex(index + 1, tt.comma) && + !this.tokens.matchesContextualAtIndex(index + 1, ContextualKeyword._from) + ) { + // import type declaration, so no need to process anything. 
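Putting generateImportReplacements together with the helpers, a default-plus-named import roughly turns into the following bookkeeping (a sketch assuming the names `_react`, `_react2`, and `_interopRequireDefault` are free and the standard, non-legacy interop is in effect):

// import React, {useState} from "react";
//
// importsToReplace.get("react"):
//   "var _react = require('react'); var _react2 = _interopRequireDefault(_react);"
// identifierReplacements:
//   "React"    -> "_react2.default"
//   "useState" -> "_react.useState"
//
// A type-only import such as `import type {Props} from "./types";` is skipped by
// preprocessImportAtIndex above, and pruneTypeOnlyImports later blanks out the require code
// for any path whose imported names are never used as values.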
+ return; + } + + if (this.tokens.matches1AtIndex(index, tt.parenL)) { + // Dynamic import, so nothing to do + return; + } + + if (this.tokens.matches1AtIndex(index, tt.name)) { + defaultNames.push(this.tokens.identifierNameAtIndex(index)); + index++; + if (this.tokens.matches1AtIndex(index, tt.comma)) { + index++; + } + } + + if (this.tokens.matches1AtIndex(index, tt.star)) { + // * as + index += 2; + wildcardNames.push(this.tokens.identifierNameAtIndex(index)); + index++; + } + + if (this.tokens.matches1AtIndex(index, tt.braceL)) { + index++; + ({newIndex: index, namedImports} = this.getNamedImports(index)); + } + + if (this.tokens.matchesContextualAtIndex(index, ContextualKeyword._from)) { + index++; + } + + if (!this.tokens.matches1AtIndex(index, tt.string)) { + throw new Error("Expected string token at the end of import statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + importInfo.defaultNames.push(...defaultNames); + importInfo.wildcardNames.push(...wildcardNames); + importInfo.namedImports.push(...namedImports); + if (defaultNames.length === 0 && wildcardNames.length === 0 && namedImports.length === 0) { + importInfo.hasBareImport = true; + } + } + + preprocessExportAtIndex(index) { + if ( + this.tokens.matches2AtIndex(index, tt._export, tt._var) || + this.tokens.matches2AtIndex(index, tt._export, tt._let) || + this.tokens.matches2AtIndex(index, tt._export, tt._const) + ) { + this.preprocessVarExportAtIndex(index); + } else if ( + this.tokens.matches2AtIndex(index, tt._export, tt._function) || + this.tokens.matches2AtIndex(index, tt._export, tt._class) + ) { + const exportName = this.tokens.identifierNameAtIndex(index + 2); + this.addExportBinding(exportName, exportName); + } else if (this.tokens.matches3AtIndex(index, tt._export, tt.name, tt._function)) { + const exportName = this.tokens.identifierNameAtIndex(index + 3); + this.addExportBinding(exportName, exportName); + } else if (this.tokens.matches2AtIndex(index, tt._export, tt.braceL)) { + this.preprocessNamedExportAtIndex(index); + } else if (this.tokens.matches2AtIndex(index, tt._export, tt.star)) { + this.preprocessExportStarAtIndex(index); + } + } + + preprocessVarExportAtIndex(index) { + let depth = 0; + // Handle cases like `export let {x} = y;`, starting at the open-brace in that case. + for (let i = index + 2; ; i++) { + if ( + this.tokens.matches1AtIndex(i, tt.braceL) || + this.tokens.matches1AtIndex(i, tt.dollarBraceL) || + this.tokens.matches1AtIndex(i, tt.bracketL) + ) { + depth++; + } else if ( + this.tokens.matches1AtIndex(i, tt.braceR) || + this.tokens.matches1AtIndex(i, tt.bracketR) + ) { + depth--; + } else if (depth === 0 && !this.tokens.matches1AtIndex(i, tt.name)) { + break; + } else if (this.tokens.matches1AtIndex(1, tt.eq)) { + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + i = endIndex - 1; + } else { + const token = this.tokens.tokens[i]; + if (isDeclaration(token)) { + const exportName = this.tokens.identifierNameAtIndex(i); + this.identifierReplacements.set(exportName, `exports.${exportName}`); + } + } + } + } + + /** + * Walk this export statement just in case it's an export...from statement. + * If it is, combine it into the import info for that path. Otherwise, just + * bail out; it'll be handled later. 
+ */ + preprocessNamedExportAtIndex(index) { + // export { + index += 2; + const {newIndex, namedImports} = this.getNamedImports(index); + index = newIndex; + + if (this.tokens.matchesContextualAtIndex(index, ContextualKeyword._from)) { + index++; + } else { + // Reinterpret "a as b" to be local/exported rather than imported/local. + for (const {importedName: localName, localName: exportedName} of namedImports) { + this.addExportBinding(localName, exportedName); + } + return; + } + + if (!this.tokens.matches1AtIndex(index, tt.string)) { + throw new Error("Expected string token at the end of import statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + importInfo.namedExports.push(...namedImports); + } + + preprocessExportStarAtIndex(index) { + let exportedName = null; + if (this.tokens.matches3AtIndex(index, tt._export, tt.star, tt._as)) { + // export * as + index += 3; + exportedName = this.tokens.identifierNameAtIndex(index); + // foo from + index += 2; + } else { + // export * from + index += 3; + } + if (!this.tokens.matches1AtIndex(index, tt.string)) { + throw new Error("Expected string token at the end of star export statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + if (exportedName !== null) { + importInfo.exportStarNames.push(exportedName); + } else { + importInfo.hasStarExport = true; + } + } + + getNamedImports(index) { + const namedImports = []; + while (true) { + if (this.tokens.matches1AtIndex(index, tt.braceR)) { + index++; + break; + } + + // Flow type imports should just be ignored. + let isTypeImport = false; + if ( + (this.tokens.matchesContextualAtIndex(index, ContextualKeyword._type) || + this.tokens.matches1AtIndex(index, tt._typeof)) && + this.tokens.matches1AtIndex(index + 1, tt.name) && + !this.tokens.matchesContextualAtIndex(index + 1, ContextualKeyword._as) + ) { + isTypeImport = true; + index++; + } + + const importedName = this.tokens.identifierNameAtIndex(index); + let localName; + index++; + if (this.tokens.matchesContextualAtIndex(index, ContextualKeyword._as)) { + index++; + localName = this.tokens.identifierNameAtIndex(index); + index++; + } else { + localName = importedName; + } + if (!isTypeImport) { + namedImports.push({importedName, localName}); + } + if (this.tokens.matches2AtIndex(index, tt.comma, tt.braceR)) { + index += 2; + break; + } else if (this.tokens.matches1AtIndex(index, tt.braceR)) { + index++; + break; + } else if (this.tokens.matches1AtIndex(index, tt.comma)) { + index++; + } else { + throw new Error(`Unexpected token: ${JSON.stringify(this.tokens.tokens[index])}`); + } + } + return {newIndex: index, namedImports}; + } + + /** + * Get a mutable import info object for this path, creating one if it doesn't + * exist yet. 
+ */ + getImportInfo(path) { + const existingInfo = this.importInfoByPath.get(path); + if (existingInfo) { + return existingInfo; + } + const newInfo = { + defaultNames: [], + wildcardNames: [], + namedImports: [], + namedExports: [], + hasBareImport: false, + exportStarNames: [], + hasStarExport: false, + }; + this.importInfoByPath.set(path, newInfo); + return newInfo; + } + + addExportBinding(localName, exportedName) { + if (!this.exportBindingsByLocalName.has(localName)) { + this.exportBindingsByLocalName.set(localName, []); + } + this.exportBindingsByLocalName.get(localName).push(exportedName); + } + + /** + * Return the code to use for the import for this path, or the empty string if + * the code has already been "claimed" by a previous import. + */ + claimImportCode(importPath) { + const result = this.importsToReplace.get(importPath); + this.importsToReplace.set(importPath, ""); + return result || ""; + } + + getIdentifierReplacement(identifierName) { + return this.identifierReplacements.get(identifierName) || null; + } + + /** + * Return a string like `exports.foo = exports.bar`. + */ + resolveExportBinding(assignedName) { + const exportedNames = this.exportBindingsByLocalName.get(assignedName); + if (!exportedNames || exportedNames.length === 0) { + return null; + } + return exportedNames.map((exportedName) => `exports.${exportedName}`).join(" = "); + } + + /** + * Return all imported/exported names where we might be interested in whether usages of those + * names are shadowed. + */ + getGlobalNames() { + return new Set([ + ...this.identifierReplacements.keys(), + ...this.exportBindingsByLocalName.keys(), + ]); + } +} diff --git a/node_modules/sucrase/dist/HelperManager.d.ts b/node_modules/sucrase/dist/HelperManager.d.ts new file mode 100644 index 00000000..5968ed98 --- /dev/null +++ b/node_modules/sucrase/dist/HelperManager.d.ts @@ -0,0 +1,17 @@ +import NameManager from "./NameManager"; +declare const HELPERS: { + interopRequireWildcard: string; + interopRequireDefault: string; + createNamedExportFrom: string; + createStarExport: string; +}; +export declare class HelperManager { + readonly nameManager: NameManager; + helperNames: { + [baseName in keyof typeof HELPERS]?: string; + }; + constructor(nameManager: NameManager); + getHelperName(baseName: keyof typeof HELPERS): string; + emitHelpers(): string; +} +export {}; diff --git a/node_modules/sucrase/dist/HelperManager.js b/node_modules/sucrase/dist/HelperManager.js new file mode 100644 index 00000000..33b408f9 --- /dev/null +++ b/node_modules/sucrase/dist/HelperManager.js @@ -0,0 +1,78 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true}); + +const HELPERS = { + interopRequireWildcard: ` + function interopRequireWildcard(obj) { + if (obj && obj.__esModule) { + return obj; + } else { + var newObj = {}; + if (obj != null) { + for (var key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + newObj[key] = obj[key]; + } + } + } + newObj.default = obj; + return newObj; + } + } + `, + interopRequireDefault: ` + function interopRequireDefault(obj) { + return obj && obj.__esModule ? 
obj : { default: obj }; + } + `, + createNamedExportFrom: ` + function createNamedExportFrom(obj, localName, importedName) { + Object.defineProperty(exports, localName, {enumerable: true, get: () => obj[importedName]}); + } + `, + // Note that TypeScript and Babel do this differently; TypeScript does a simple existence + // check in the exports object and does a plain assignment, whereas Babel uses + // defineProperty and builds an object of explicitly-exported names so that star exports can + // always take lower precedence. For now, we do the easier TypeScript thing. + createStarExport: ` + function createStarExport(obj) { + Object.keys(obj) + .filter((key) => key !== "default" && key !== "__esModule") + .forEach((key) => { + if (exports.hasOwnProperty(key)) { + return; + } + Object.defineProperty(exports, key, {enumerable: true, get: () => obj[key]}); + }); + } + `, +}; + + class HelperManager { + __init() {this.helperNames = {}} + constructor( nameManager) {;this.nameManager = nameManager;HelperManager.prototype.__init.call(this);} + + getHelperName(baseName) { + let helperName = this.helperNames[baseName]; + if (helperName) { + return helperName; + } + helperName = this.nameManager.claimFreeName(`_${baseName}`); + this.helperNames[baseName] = helperName; + return helperName; + } + + emitHelpers() { + let resultCode = ""; + for (const [baseName, helperCode] of Object.entries(HELPERS)) { + const helperName = this.helperNames[baseName]; + if (helperName) { + resultCode += " "; + resultCode += helperCode + .replace(baseName, helperName) + .replace(/\s+/g, " ") + .trim(); + } + } + return resultCode; + } +} exports.HelperManager = HelperManager; diff --git a/node_modules/sucrase/dist/HelperManager.mjs b/node_modules/sucrase/dist/HelperManager.mjs new file mode 100644 index 00000000..459c1ded --- /dev/null +++ b/node_modules/sucrase/dist/HelperManager.mjs @@ -0,0 +1,78 @@ + + +const HELPERS = { + interopRequireWildcard: ` + function interopRequireWildcard(obj) { + if (obj && obj.__esModule) { + return obj; + } else { + var newObj = {}; + if (obj != null) { + for (var key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + newObj[key] = obj[key]; + } + } + } + newObj.default = obj; + return newObj; + } + } + `, + interopRequireDefault: ` + function interopRequireDefault(obj) { + return obj && obj.__esModule ? obj : { default: obj }; + } + `, + createNamedExportFrom: ` + function createNamedExportFrom(obj, localName, importedName) { + Object.defineProperty(exports, localName, {enumerable: true, get: () => obj[importedName]}); + } + `, + // Note that TypeScript and Babel do this differently; TypeScript does a simple existence + // check in the exports object and does a plain assignment, whereas Babel uses + // defineProperty and builds an object of explicitly-exported names so that star exports can + // always take lower precedence. For now, we do the easier TypeScript thing. 
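Restating what the two interop helpers above do at runtime, and how an export-from statement leans on createNamedExportFrom (a sketch, assuming the default claimed helper names):

// interopRequireDefault(m)  -> m unchanged if m.__esModule, otherwise {default: m}
// interopRequireWildcard(m) -> m unchanged if m.__esModule, otherwise a shallow copy of m
//                              with .default set to m itself
//
// export {foo} from "./mod";   becomes roughly:
//   var _mod = require('./mod'); _createNamedExportFrom(_mod, 'foo', 'foo');
// which installs an `exports.foo` getter that reads _mod.foo, so the re-export stays live.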
+ createStarExport: ` + function createStarExport(obj) { + Object.keys(obj) + .filter((key) => key !== "default" && key !== "__esModule") + .forEach((key) => { + if (exports.hasOwnProperty(key)) { + return; + } + Object.defineProperty(exports, key, {enumerable: true, get: () => obj[key]}); + }); + } + `, +}; + +export class HelperManager { + __init() {this.helperNames = {}} + constructor( nameManager) {;this.nameManager = nameManager;HelperManager.prototype.__init.call(this);} + + getHelperName(baseName) { + let helperName = this.helperNames[baseName]; + if (helperName) { + return helperName; + } + helperName = this.nameManager.claimFreeName(`_${baseName}`); + this.helperNames[baseName] = helperName; + return helperName; + } + + emitHelpers() { + let resultCode = ""; + for (const [baseName, helperCode] of Object.entries(HELPERS)) { + const helperName = this.helperNames[baseName]; + if (helperName) { + resultCode += " "; + resultCode += helperCode + .replace(baseName, helperName) + .replace(/\s+/g, " ") + .trim(); + } + } + return resultCode; + } +} diff --git a/node_modules/sucrase/dist/NameManager.d.ts b/node_modules/sucrase/dist/NameManager.d.ts new file mode 100644 index 00000000..07c0d543 --- /dev/null +++ b/node_modules/sucrase/dist/NameManager.d.ts @@ -0,0 +1,9 @@ +import TokenProcessor from "./TokenProcessor"; +export default class NameManager { + readonly tokens: TokenProcessor; + private readonly usedNames; + constructor(tokens: TokenProcessor); + preprocessNames(): void; + claimFreeName(name: string): string; + findFreeName(name: string): string; +} diff --git a/node_modules/sucrase/dist/NameManager.js b/node_modules/sucrase/dist/NameManager.js new file mode 100644 index 00000000..c04f8ada --- /dev/null +++ b/node_modules/sucrase/dist/NameManager.js @@ -0,0 +1,33 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
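The NameManager declared above (implementation follows) is what hands out the `_react`, `_react2`, `_interopRequireDefault` style names used by the import processor and the helper manager. A sketch of the claiming behavior, assuming `_foo` already appears somewhere in the source file:

// nameManager.claimFreeName("_foo") -> "_foo2"   (numeric suffixes start at 2)
// nameManager.claimFreeName("_foo") -> "_foo3"   (each claim marks the name as used)
// nameManager.claimFreeName("_bar") -> "_bar"    (unused names are returned as-is)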
obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _types = require('./parser/tokenizer/types'); + + + class NameManager { + __init() {this.usedNames = new Set()} + + constructor( tokens) {;this.tokens = tokens;NameManager.prototype.__init.call(this);} + + preprocessNames() { + for (let i = 0; i < this.tokens.tokens.length; i++) { + if (this.tokens.matches1AtIndex(i, _types.TokenType.name)) { + this.usedNames.add(this.tokens.identifierNameAtIndex(i)); + } + } + } + + claimFreeName(name) { + const newName = this.findFreeName(name); + this.usedNames.add(newName); + return newName; + } + + findFreeName(name) { + if (!this.usedNames.has(name)) { + return name; + } + let suffixNum = 2; + while (this.usedNames.has(name + suffixNum)) { + suffixNum++; + } + return name + suffixNum; + } +} exports.default = NameManager; diff --git a/node_modules/sucrase/dist/NameManager.mjs b/node_modules/sucrase/dist/NameManager.mjs new file mode 100644 index 00000000..74b0c1da --- /dev/null +++ b/node_modules/sucrase/dist/NameManager.mjs @@ -0,0 +1,33 @@ +import {TokenType as tt} from "./parser/tokenizer/types"; + + +export default class NameManager { + __init() {this.usedNames = new Set()} + + constructor( tokens) {;this.tokens = tokens;NameManager.prototype.__init.call(this);} + + preprocessNames() { + for (let i = 0; i < this.tokens.tokens.length; i++) { + if (this.tokens.matches1AtIndex(i, tt.name)) { + this.usedNames.add(this.tokens.identifierNameAtIndex(i)); + } + } + } + + claimFreeName(name) { + const newName = this.findFreeName(name); + this.usedNames.add(newName); + return newName; + } + + findFreeName(name) { + if (!this.usedNames.has(name)) { + return name; + } + let suffixNum = 2; + while (this.usedNames.has(name + suffixNum)) { + suffixNum++; + } + return name + suffixNum; + } +} diff --git a/node_modules/sucrase/dist/TokenProcessor.d.ts b/node_modules/sucrase/dist/TokenProcessor.d.ts new file mode 100644 index 00000000..c19748a0 --- /dev/null +++ b/node_modules/sucrase/dist/TokenProcessor.d.ts @@ -0,0 +1,61 @@ +import { Token } from "./parser/tokenizer"; +import { ContextualKeyword } from "./parser/tokenizer/keywords"; +import { TokenType } from "./parser/tokenizer/types"; +export interface TokenProcessorSnapshot { + resultCode: string; + tokenIndex: number; +} +export default class TokenProcessor { + readonly code: string; + readonly tokens: Array; + readonly isFlowEnabled: boolean; + private resultCode; + private tokenIndex; + constructor(code: string, tokens: Array, isFlowEnabled: boolean); + /** + * Make a new TokenProcessor for things like lookahead. 
+ */ + snapshot(): TokenProcessorSnapshot; + restoreToSnapshot(snapshot: TokenProcessorSnapshot): void; + getResultCodeIndex(): number; + reset(): void; + matchesContextualAtIndex(index: number, contextualKeyword: ContextualKeyword): boolean; + identifierNameAtIndex(index: number): string; + identifierName(): string; + identifierNameForToken(token: Token): string; + rawCodeForToken(token: Token): string; + stringValueAtIndex(index: number): string; + stringValue(): string; + stringValueForToken(token: Token): string; + matches1AtIndex(index: number, t1: TokenType): boolean; + matches2AtIndex(index: number, t1: TokenType, t2: TokenType): boolean; + matches3AtIndex(index: number, t1: TokenType, t2: TokenType, t3: TokenType): boolean; + matches1(t1: TokenType): boolean; + matches2(t1: TokenType, t2: TokenType): boolean; + matches3(t1: TokenType, t2: TokenType, t3: TokenType): boolean; + matches4(t1: TokenType, t2: TokenType, t3: TokenType, t4: TokenType): boolean; + matches5(t1: TokenType, t2: TokenType, t3: TokenType, t4: TokenType, t5: TokenType): boolean; + matchesContextual(contextualKeyword: ContextualKeyword): boolean; + matchesContextIdAndLabel(type: TokenType, contextId: number): boolean; + previousWhitespaceAndComments(): string; + replaceToken(newCode: string): void; + replaceTokenTrimmingLeftWhitespace(newCode: string): void; + removeInitialToken(): void; + removeToken(): void; + copyExpectedToken(tokenType: TokenType): void; + copyToken(): void; + copyTokenWithPrefix(prefix: string): void; + appendCode(code: string): void; + currentToken(): Token; + currentTokenCode(): string; + tokenAtRelativeIndex(relativeIndex: number): Token; + currentIndex(): number; + /** + * Move to the next token. Only suitable in preprocessing steps. When + * generating new code, you should use copyToken or removeToken. + */ + nextToken(): void; + previousToken(): void; + finish(): string; + isAtEnd(): boolean; +} diff --git a/node_modules/sucrase/dist/TokenProcessor.js b/node_modules/sucrase/dist/TokenProcessor.js new file mode 100644 index 00000000..15ebc26f --- /dev/null +++ b/node_modules/sucrase/dist/TokenProcessor.js @@ -0,0 +1,244 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + +var _types = require('./parser/tokenizer/types'); + + + + + + + class TokenProcessor { + __init() {this.resultCode = ""} + __init2() {this.tokenIndex = 0} + + constructor( + code, + tokens, + isFlowEnabled, + ) {;this.code = code;this.tokens = tokens;this.isFlowEnabled = isFlowEnabled;TokenProcessor.prototype.__init.call(this);TokenProcessor.prototype.__init2.call(this);} + + /** + * Make a new TokenProcessor for things like lookahead. + */ + snapshot() { + return {resultCode: this.resultCode, tokenIndex: this.tokenIndex}; + } + + restoreToSnapshot(snapshot) { + this.resultCode = snapshot.resultCode; + this.tokenIndex = snapshot.tokenIndex; + } + + getResultCodeIndex() { + return this.resultCode.length; + } + + reset() { + this.resultCode = ""; + this.tokenIndex = 0; + } + + matchesContextualAtIndex(index, contextualKeyword) { + return ( + this.matches1AtIndex(index, _types.TokenType.name) && + this.tokens[index].contextualKeyword === contextualKeyword + ); + } + + identifierNameAtIndex(index) { + // TODO: We need to process escapes since technically you can have unicode escapes in variable + // names. 
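The matching and value helpers above are the primitives every check in CJSImportProcessor is built from. For illustration (token types written as `tt.*`, matching the ESM build of this file):

// tokens.matches3AtIndex(i, tt._export, tt.star, tt._as)
//   -> true only if tokens i, i+1, i+2 are exactly `export`, `*`, `as`
// tokens.matchesContextualAtIndex(i, ContextualKeyword._from)
//   -> true if token i is a plain name token whose contextualKeyword is `from`
// tokens.stringValueAtIndex(i)
//   -> the raw source between the quotes, so the token `'./mod'` yields "./mod"
//      (escape sequences are not decoded, as noted in stringValueForToken below)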
+ return this.identifierNameForToken(this.tokens[index]); + } + + identifierName() { + return this.identifierNameForToken(this.currentToken()); + } + + identifierNameForToken(token) { + return this.code.slice(token.start, token.end); + } + + rawCodeForToken(token) { + return this.code.slice(token.start, token.end); + } + + stringValueAtIndex(index) { + return this.stringValueForToken(this.tokens[index]); + } + + stringValue() { + return this.stringValueForToken(this.currentToken()); + } + + stringValueForToken(token) { + // This is used to identify when two imports are the same and to resolve TypeScript enum keys. + // Ideally we'd process escapes within the strings, but for now we pretty much take the raw + // code. + return this.code.slice(token.start + 1, token.end - 1); + } + + matches1AtIndex(index, t1) { + return this.tokens[index].type === t1; + } + + matches2AtIndex(index, t1, t2) { + return this.tokens[index].type === t1 && this.tokens[index + 1].type === t2; + } + + matches3AtIndex(index, t1, t2, t3) { + return ( + this.tokens[index].type === t1 && + this.tokens[index + 1].type === t2 && + this.tokens[index + 2].type === t3 + ); + } + + matches1(t1) { + return this.tokens[this.tokenIndex].type === t1; + } + + matches2(t1, t2) { + return this.tokens[this.tokenIndex].type === t1 && this.tokens[this.tokenIndex + 1].type === t2; + } + + matches3(t1, t2, t3) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 + ); + } + + matches4(t1, t2, t3, t4) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 && + this.tokens[this.tokenIndex + 3].type === t4 + ); + } + + matches5(t1, t2, t3, t4, t5) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 && + this.tokens[this.tokenIndex + 3].type === t4 && + this.tokens[this.tokenIndex + 4].type === t5 + ); + } + + matchesContextual(contextualKeyword) { + return this.matchesContextualAtIndex(this.tokenIndex, contextualKeyword); + } + + matchesContextIdAndLabel(type, contextId) { + return this.matches1(type) && this.currentToken().contextId === contextId; + } + + previousWhitespaceAndComments() { + let whitespaceAndComments = this.code.slice( + this.tokenIndex > 0 ? this.tokens[this.tokenIndex - 1].end : 0, + this.tokenIndex < this.tokens.length ? 
this.tokens[this.tokenIndex].start : this.code.length, + ); + if (this.isFlowEnabled) { + whitespaceAndComments = whitespaceAndComments.replace(/@flow/g, ""); + } + return whitespaceAndComments; + } + + replaceToken(newCode) { + this.resultCode += this.previousWhitespaceAndComments(); + this.resultCode += newCode; + this.tokenIndex++; + } + + replaceTokenTrimmingLeftWhitespace(newCode) { + this.resultCode += this.previousWhitespaceAndComments().replace(/[^\r\n]/g, ""); + this.resultCode += newCode; + this.tokenIndex++; + } + + removeInitialToken() { + this.replaceToken(""); + } + + removeToken() { + this.replaceTokenTrimmingLeftWhitespace(""); + } + + copyExpectedToken(tokenType) { + if (this.tokens[this.tokenIndex].type !== tokenType) { + throw new Error(`Expected token ${tokenType}`); + } + this.copyToken(); + } + + copyToken() { + this.resultCode += this.previousWhitespaceAndComments(); + this.resultCode += this.code.slice( + this.tokens[this.tokenIndex].start, + this.tokens[this.tokenIndex].end, + ); + this.tokenIndex++; + } + + copyTokenWithPrefix(prefix) { + this.resultCode += this.previousWhitespaceAndComments(); + this.resultCode += prefix; + this.resultCode += this.code.slice( + this.tokens[this.tokenIndex].start, + this.tokens[this.tokenIndex].end, + ); + this.tokenIndex++; + } + + appendCode(code) { + this.resultCode += code; + } + + currentToken() { + return this.tokens[this.tokenIndex]; + } + + currentTokenCode() { + const token = this.currentToken(); + return this.code.slice(token.start, token.end); + } + + tokenAtRelativeIndex(relativeIndex) { + return this.tokens[this.tokenIndex + relativeIndex]; + } + + currentIndex() { + return this.tokenIndex; + } + + /** + * Move to the next token. Only suitable in preprocessing steps. When + * generating new code, you should use copyToken or removeToken. + */ + nextToken() { + if (this.tokenIndex === this.tokens.length) { + throw new Error("Unexpectedly reached end of input."); + } + this.tokenIndex++; + } + + previousToken() { + this.tokenIndex--; + } + + finish() { + if (this.tokenIndex !== this.tokens.length) { + throw new Error("Tried to finish processing tokens before reaching the end."); + } + this.resultCode += this.previousWhitespaceAndComments(); + return this.resultCode; + } + + isAtEnd() { + return this.tokenIndex === this.tokens.length; + } +} exports.default = TokenProcessor; diff --git a/node_modules/sucrase/dist/TokenProcessor.mjs b/node_modules/sucrase/dist/TokenProcessor.mjs new file mode 100644 index 00000000..f595494e --- /dev/null +++ b/node_modules/sucrase/dist/TokenProcessor.mjs @@ -0,0 +1,244 @@ + + +import { TokenType as tt} from "./parser/tokenizer/types"; + + + + + + +export default class TokenProcessor { + __init() {this.resultCode = ""} + __init2() {this.tokenIndex = 0} + + constructor( + code, + tokens, + isFlowEnabled, + ) {;this.code = code;this.tokens = tokens;this.isFlowEnabled = isFlowEnabled;TokenProcessor.prototype.__init.call(this);TokenProcessor.prototype.__init2.call(this);} + + /** + * Make a new TokenProcessor for things like lookahead. 
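The copy/replace methods above are how the transformers actually emit output: each token is either copied verbatim or replaced, always together with the whitespace and comments that preceded it. A conceptual sketch of that driving loop (not the real RootTransformer):

// while (!tokens.isAtEnd()) {
//   if (tokens.matches1(tt.name) && tokens.identifierName() === "foo") {
//     tokens.replaceToken("exports.foo");  // replacement text, preceded by original whitespace
//   } else {
//     tokens.copyToken();                  // original token text, unchanged
//   }
// }
// const output = tokens.finish();          // throws unless every token was consumed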
+ */ + snapshot() { + return {resultCode: this.resultCode, tokenIndex: this.tokenIndex}; + } + + restoreToSnapshot(snapshot) { + this.resultCode = snapshot.resultCode; + this.tokenIndex = snapshot.tokenIndex; + } + + getResultCodeIndex() { + return this.resultCode.length; + } + + reset() { + this.resultCode = ""; + this.tokenIndex = 0; + } + + matchesContextualAtIndex(index, contextualKeyword) { + return ( + this.matches1AtIndex(index, tt.name) && + this.tokens[index].contextualKeyword === contextualKeyword + ); + } + + identifierNameAtIndex(index) { + // TODO: We need to process escapes since technically you can have unicode escapes in variable + // names. + return this.identifierNameForToken(this.tokens[index]); + } + + identifierName() { + return this.identifierNameForToken(this.currentToken()); + } + + identifierNameForToken(token) { + return this.code.slice(token.start, token.end); + } + + rawCodeForToken(token) { + return this.code.slice(token.start, token.end); + } + + stringValueAtIndex(index) { + return this.stringValueForToken(this.tokens[index]); + } + + stringValue() { + return this.stringValueForToken(this.currentToken()); + } + + stringValueForToken(token) { + // This is used to identify when two imports are the same and to resolve TypeScript enum keys. + // Ideally we'd process escapes within the strings, but for now we pretty much take the raw + // code. + return this.code.slice(token.start + 1, token.end - 1); + } + + matches1AtIndex(index, t1) { + return this.tokens[index].type === t1; + } + + matches2AtIndex(index, t1, t2) { + return this.tokens[index].type === t1 && this.tokens[index + 1].type === t2; + } + + matches3AtIndex(index, t1, t2, t3) { + return ( + this.tokens[index].type === t1 && + this.tokens[index + 1].type === t2 && + this.tokens[index + 2].type === t3 + ); + } + + matches1(t1) { + return this.tokens[this.tokenIndex].type === t1; + } + + matches2(t1, t2) { + return this.tokens[this.tokenIndex].type === t1 && this.tokens[this.tokenIndex + 1].type === t2; + } + + matches3(t1, t2, t3) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 + ); + } + + matches4(t1, t2, t3, t4) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 && + this.tokens[this.tokenIndex + 3].type === t4 + ); + } + + matches5(t1, t2, t3, t4, t5) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 && + this.tokens[this.tokenIndex + 3].type === t4 && + this.tokens[this.tokenIndex + 4].type === t5 + ); + } + + matchesContextual(contextualKeyword) { + return this.matchesContextualAtIndex(this.tokenIndex, contextualKeyword); + } + + matchesContextIdAndLabel(type, contextId) { + return this.matches1(type) && this.currentToken().contextId === contextId; + } + + previousWhitespaceAndComments() { + let whitespaceAndComments = this.code.slice( + this.tokenIndex > 0 ? this.tokens[this.tokenIndex - 1].end : 0, + this.tokenIndex < this.tokens.length ? 
this.tokens[this.tokenIndex].start : this.code.length, + ); + if (this.isFlowEnabled) { + whitespaceAndComments = whitespaceAndComments.replace(/@flow/g, ""); + } + return whitespaceAndComments; + } + + replaceToken(newCode) { + this.resultCode += this.previousWhitespaceAndComments(); + this.resultCode += newCode; + this.tokenIndex++; + } + + replaceTokenTrimmingLeftWhitespace(newCode) { + this.resultCode += this.previousWhitespaceAndComments().replace(/[^\r\n]/g, ""); + this.resultCode += newCode; + this.tokenIndex++; + } + + removeInitialToken() { + this.replaceToken(""); + } + + removeToken() { + this.replaceTokenTrimmingLeftWhitespace(""); + } + + copyExpectedToken(tokenType) { + if (this.tokens[this.tokenIndex].type !== tokenType) { + throw new Error(`Expected token ${tokenType}`); + } + this.copyToken(); + } + + copyToken() { + this.resultCode += this.previousWhitespaceAndComments(); + this.resultCode += this.code.slice( + this.tokens[this.tokenIndex].start, + this.tokens[this.tokenIndex].end, + ); + this.tokenIndex++; + } + + copyTokenWithPrefix(prefix) { + this.resultCode += this.previousWhitespaceAndComments(); + this.resultCode += prefix; + this.resultCode += this.code.slice( + this.tokens[this.tokenIndex].start, + this.tokens[this.tokenIndex].end, + ); + this.tokenIndex++; + } + + appendCode(code) { + this.resultCode += code; + } + + currentToken() { + return this.tokens[this.tokenIndex]; + } + + currentTokenCode() { + const token = this.currentToken(); + return this.code.slice(token.start, token.end); + } + + tokenAtRelativeIndex(relativeIndex) { + return this.tokens[this.tokenIndex + relativeIndex]; + } + + currentIndex() { + return this.tokenIndex; + } + + /** + * Move to the next token. Only suitable in preprocessing steps. When + * generating new code, you should use copyToken or removeToken. + */ + nextToken() { + if (this.tokenIndex === this.tokens.length) { + throw new Error("Unexpectedly reached end of input."); + } + this.tokenIndex++; + } + + previousToken() { + this.tokenIndex--; + } + + finish() { + if (this.tokenIndex !== this.tokens.length) { + throw new Error("Tried to finish processing tokens before reaching the end."); + } + this.resultCode += this.previousWhitespaceAndComments(); + return this.resultCode; + } + + isAtEnd() { + return this.tokenIndex === this.tokens.length; + } +} diff --git a/node_modules/sucrase/dist/cli.d.ts b/node_modules/sucrase/dist/cli.d.ts new file mode 100644 index 00000000..238c75fb --- /dev/null +++ b/node_modules/sucrase/dist/cli.d.ts @@ -0,0 +1 @@ +export default function run(): void; diff --git a/node_modules/sucrase/dist/cli.js b/node_modules/sucrase/dist/cli.js new file mode 100644 index 00000000..d6bc5018 --- /dev/null +++ b/node_modules/sucrase/dist/cli.js @@ -0,0 +1,106 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});/* eslint-disable no-console */ +var _commander = require('commander'); var _commander2 = _interopRequireDefault(_commander); +var _fs = require('mz/fs'); +var _path = require('path'); + +var _index = require('./index'); + + + + + + + + + function run() { + _commander2.default + .description(`Sucrase: super-fast Babel alternative.`) + .usage("[options] ") + .option( + "-d, --out-dir ", + "Compile an input directory of modules into an output directory.", + ) + .option("--out-extension ", "File extension to use for all output files.", "js") + .option("--exclude-dirs ", "Names of directories that should not be traversed.") + .option("-t, --transforms ", "Comma-separated list of transforms to run.") + .option("-q, --quiet", "Don't print the names of converted files.") + .option( + "--enable-legacy-typescript-module-interop", + "Use default TypeScript ESM/CJS interop strategy.", + ) + .option("--enable-legacy-babel5-module-interop", "Use Babel 5 ESM/CJS interop strategy.") + .option("--jsx-pragma ", "Element creation function, defaults to `React.createElement`") + .option("--jsx-fragment-pragma ", "Fragment component, defaults to `React.Fragment`") + .parse(process.argv); + + if (!_commander2.default.outDir) { + console.error("Out directory is required"); + process.exit(1); + } + + if (!_commander2.default.transforms) { + console.error("Transforms option is required."); + process.exit(1); + } + + if (!_commander2.default.args[0]) { + console.error("Source directory is required."); + process.exit(1); + } + + const outDir = _commander2.default.outDir; + const srcDir = _commander2.default.args[0]; + + const options = { + outExtension: _commander2.default.outExtension, + excludeDirs: _commander2.default.excludeDirs ? _commander2.default.excludeDirs.split(",") : [], + quiet: _commander2.default.quiet, + sucraseOptions: { + transforms: _commander2.default.transforms.split(","), + enableLegacyTypeScriptModuleInterop: _commander2.default.enableLegacyTypescriptModuleInterop, + enableLegacyBabel5ModuleInterop: _commander2.default.enableLegacyBabel5ModuleInterop, + jsxPragma: _commander2.default.jsxPragma || "React.createElement", + jsxFragmentPragma: _commander2.default.jsxFragmentPragma || "React.Fragment", + }, + }; + + buildDirectory(srcDir, outDir, options).catch((e) => { + process.exitCode = 1; + console.error(e); + }); +} exports.default = run; + +async function buildDirectory( + srcDirPath, + outDirPath, + options, +) { + const extension = options.sucraseOptions.transforms.includes("typescript") ? 
".ts" : ".js"; + if (!(await _fs.exists.call(void 0, outDirPath))) { + await _fs.mkdir.call(void 0, outDirPath); + } + for (const child of await _fs.readdir.call(void 0, srcDirPath)) { + if (["node_modules", ".git"].includes(child) || options.excludeDirs.includes(child)) { + continue; + } + const srcChildPath = _path.join.call(void 0, srcDirPath, child); + const outChildPath = _path.join.call(void 0, outDirPath, child); + if ((await _fs.stat.call(void 0, srcChildPath)).isDirectory()) { + await buildDirectory(srcChildPath, outChildPath, options); + } else if (srcChildPath.endsWith(extension)) { + const outPath = `${outChildPath.substr(0, outChildPath.length - extension.length)}.${ + options.outExtension + }`; + await buildFile(srcChildPath, outPath, options); + } + } +} + +async function buildFile(srcPath, outPath, options) { + if (!options.quiet) { + console.log(`${srcPath} -> ${outPath}`); + } + const code = (await _fs.readFile.call(void 0, srcPath)).toString(); + const transformedCode = _index.transform.call(void 0, code, {...options.sucraseOptions, filePath: srcPath}).code; + await _fs.writeFile.call(void 0, outPath, transformedCode); +} diff --git a/node_modules/sucrase/dist/cli.mjs b/node_modules/sucrase/dist/cli.mjs new file mode 100644 index 00000000..4ca4517d --- /dev/null +++ b/node_modules/sucrase/dist/cli.mjs @@ -0,0 +1,106 @@ +/* eslint-disable no-console */ +import commander from "commander"; +import {exists, mkdir, readdir, readFile, stat, writeFile} from "mz/fs"; +import {join} from "path"; + +import { transform} from "./index"; + + + + + + + + +export default function run() { + commander + .description(`Sucrase: super-fast Babel alternative.`) + .usage("[options] ") + .option( + "-d, --out-dir ", + "Compile an input directory of modules into an output directory.", + ) + .option("--out-extension ", "File extension to use for all output files.", "js") + .option("--exclude-dirs ", "Names of directories that should not be traversed.") + .option("-t, --transforms ", "Comma-separated list of transforms to run.") + .option("-q, --quiet", "Don't print the names of converted files.") + .option( + "--enable-legacy-typescript-module-interop", + "Use default TypeScript ESM/CJS interop strategy.", + ) + .option("--enable-legacy-babel5-module-interop", "Use Babel 5 ESM/CJS interop strategy.") + .option("--jsx-pragma ", "Element creation function, defaults to `React.createElement`") + .option("--jsx-fragment-pragma ", "Fragment component, defaults to `React.Fragment`") + .parse(process.argv); + + if (!commander.outDir) { + console.error("Out directory is required"); + process.exit(1); + } + + if (!commander.transforms) { + console.error("Transforms option is required."); + process.exit(1); + } + + if (!commander.args[0]) { + console.error("Source directory is required."); + process.exit(1); + } + + const outDir = commander.outDir; + const srcDir = commander.args[0]; + + const options = { + outExtension: commander.outExtension, + excludeDirs: commander.excludeDirs ? 
commander.excludeDirs.split(",") : [], + quiet: commander.quiet, + sucraseOptions: { + transforms: commander.transforms.split(","), + enableLegacyTypeScriptModuleInterop: commander.enableLegacyTypescriptModuleInterop, + enableLegacyBabel5ModuleInterop: commander.enableLegacyBabel5ModuleInterop, + jsxPragma: commander.jsxPragma || "React.createElement", + jsxFragmentPragma: commander.jsxFragmentPragma || "React.Fragment", + }, + }; + + buildDirectory(srcDir, outDir, options).catch((e) => { + process.exitCode = 1; + console.error(e); + }); +} + +async function buildDirectory( + srcDirPath, + outDirPath, + options, +) { + const extension = options.sucraseOptions.transforms.includes("typescript") ? ".ts" : ".js"; + if (!(await exists(outDirPath))) { + await mkdir(outDirPath); + } + for (const child of await readdir(srcDirPath)) { + if (["node_modules", ".git"].includes(child) || options.excludeDirs.includes(child)) { + continue; + } + const srcChildPath = join(srcDirPath, child); + const outChildPath = join(outDirPath, child); + if ((await stat(srcChildPath)).isDirectory()) { + await buildDirectory(srcChildPath, outChildPath, options); + } else if (srcChildPath.endsWith(extension)) { + const outPath = `${outChildPath.substr(0, outChildPath.length - extension.length)}.${ + options.outExtension + }`; + await buildFile(srcChildPath, outPath, options); + } + } +} + +async function buildFile(srcPath, outPath, options) { + if (!options.quiet) { + console.log(`${srcPath} -> ${outPath}`); + } + const code = (await readFile(srcPath)).toString(); + const transformedCode = transform(code, {...options.sucraseOptions, filePath: srcPath}).code; + await writeFile(outPath, transformedCode); +} diff --git a/node_modules/sucrase/dist/computeSourceMap.d.ts b/node_modules/sucrase/dist/computeSourceMap.d.ts new file mode 100644 index 00000000..31d54abb --- /dev/null +++ b/node_modules/sucrase/dist/computeSourceMap.d.ts @@ -0,0 +1,14 @@ +import { SourceMapOptions } from "./index"; +export interface RawSourceMap { + version: number; + file: string; + sources: Array; + sourceRoot?: string; + sourcesContent?: Array; + mappings: string; + names: Array; +} +/** + * Generate a simple source map indicating that each line maps directly to the original line. + */ +export default function computeSourceMap(code: string, filePath: string, { compiledFilename }: SourceMapOptions): RawSourceMap; diff --git a/node_modules/sucrase/dist/computeSourceMap.js b/node_modules/sucrase/dist/computeSourceMap.js new file mode 100644 index 00000000..2fa81abf --- /dev/null +++ b/node_modules/sucrase/dist/computeSourceMap.js @@ -0,0 +1,35 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); +var _charcodes = require('./parser/util/charcodes'); + + + + + + + + + + + +/** + * Generate a simple source map indicating that each line maps directly to the original line. 
+ */ + function computeSourceMap( + code, + filePath, + {compiledFilename}, +) { + let mappings = "AAAA"; + for (let i = 0; i < code.length; i++) { + if (code.charCodeAt(i) === _charcodes.charCodes.lineFeed) { + mappings += ";AACA"; + } + } + return { + version: 3, + file: compiledFilename || "", + sources: [filePath], + mappings, + names: [], + }; +} exports.default = computeSourceMap; diff --git a/node_modules/sucrase/dist/computeSourceMap.mjs b/node_modules/sucrase/dist/computeSourceMap.mjs new file mode 100644 index 00000000..79089d7f --- /dev/null +++ b/node_modules/sucrase/dist/computeSourceMap.mjs @@ -0,0 +1,35 @@ + +import {charCodes} from "./parser/util/charcodes"; + + + + + + + + + + + +/** + * Generate a simple source map indicating that each line maps directly to the original line. + */ +export default function computeSourceMap( + code, + filePath, + {compiledFilename}, +) { + let mappings = "AAAA"; + for (let i = 0; i < code.length; i++) { + if (code.charCodeAt(i) === charCodes.lineFeed) { + mappings += ";AACA"; + } + } + return { + version: 3, + file: compiledFilename || "", + sources: [filePath], + mappings, + names: [], + }; +} diff --git a/node_modules/sucrase/dist/identifyShadowedGlobals.d.ts b/node_modules/sucrase/dist/identifyShadowedGlobals.d.ts new file mode 100644 index 00000000..6435e79b --- /dev/null +++ b/node_modules/sucrase/dist/identifyShadowedGlobals.d.ts @@ -0,0 +1,12 @@ +import { Scope } from "./parser/tokenizer/state"; +import TokenProcessor from "./TokenProcessor"; +/** + * Traverse the given tokens and modify them if necessary to indicate that some names shadow global + * variables. + */ +export default function identifyShadowedGlobals(tokens: TokenProcessor, scopes: Array, globalNames: Set): void; +/** + * We can do a fast up-front check to see if there are any declarations to global names. If not, + * then there's no point in computing scope assignments. + */ +export declare function hasShadowedGlobals(tokens: TokenProcessor, globalNames: Set): boolean; diff --git a/node_modules/sucrase/dist/identifyShadowedGlobals.js b/node_modules/sucrase/dist/identifyShadowedGlobals.js new file mode 100644 index 00000000..bc3877f7 --- /dev/null +++ b/node_modules/sucrase/dist/identifyShadowedGlobals.js @@ -0,0 +1,94 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true}); + + + +var _tokenizer = require('./parser/tokenizer'); + +var _types = require('./parser/tokenizer/types'); + + +/** + * Traverse the given tokens and modify them if necessary to indicate that some names shadow global + * variables. + */ + function identifyShadowedGlobals( + tokens, + scopes, + globalNames, +) { + if (!hasShadowedGlobals(tokens, globalNames)) { + return; + } + markShadowedGlobals(tokens, scopes, globalNames); +} exports.default = identifyShadowedGlobals; + +/** + * We can do a fast up-front check to see if there are any declarations to global names. If not, + * then there's no point in computing scope assignments. + */ +// Exported for testing. 
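The mapping string produced by computeSourceMap above is purely structural: it starts with one "AAAA" segment and then appends ";AACA" per newline, so every output line points at the same line of the single source file, column 0. For a hypothetical three-line input (requiring the compiled file directly, at the path this patch adds it under):

const computeSourceMap = require("sucrase/dist/computeSourceMap").default;

computeSourceMap("a;\nb;\nc;\n", "example.ts", {compiledFilename: "example.js"});
// => { version: 3, file: "example.js", sources: ["example.ts"],
//      mappings: "AAAA;AACA;AACA;AACA", names: [] }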
+ function hasShadowedGlobals(tokens, globalNames) { + for (const token of tokens.tokens) { + if ( + token.type === _types.TokenType.name && + _tokenizer.isNonTopLevelDeclaration.call(void 0, token) && + globalNames.has(tokens.identifierNameForToken(token)) + ) { + return true; + } + } + return false; +} exports.hasShadowedGlobals = hasShadowedGlobals; + +function markShadowedGlobals( + tokens, + scopes, + globalNames, +) { + const scopeStack = []; + let scopeIndex = scopes.length - 1; + // Scopes were generated at completion time, so they're sorted by end index, so we can maintain a + // good stack by going backwards through them. + for (let i = tokens.tokens.length - 1; ; i--) { + while (scopeStack.length > 0 && scopeStack[scopeStack.length - 1].startTokenIndex === i + 1) { + scopeStack.pop(); + } + while (scopeIndex >= 0 && scopes[scopeIndex].endTokenIndex === i + 1) { + scopeStack.push(scopes[scopeIndex]); + scopeIndex--; + } + // Process scopes after the last iteration so we can make sure we pop all of them. + if (i < 0) { + break; + } + + const token = tokens.tokens[i]; + const name = tokens.identifierNameForToken(token); + if (scopeStack.length > 1 && token.type === _types.TokenType.name && globalNames.has(name)) { + if (_tokenizer.isBlockScopedDeclaration.call(void 0, token)) { + markShadowedForScope(scopeStack[scopeStack.length - 1], tokens, name); + } else if (_tokenizer.isFunctionScopedDeclaration.call(void 0, token)) { + let stackIndex = scopeStack.length - 1; + while (stackIndex > 0 && !scopeStack[stackIndex].isFunctionScope) { + stackIndex--; + } + if (stackIndex < 0) { + throw new Error("Did not find parent function scope."); + } + markShadowedForScope(scopeStack[stackIndex], tokens, name); + } + } + } + if (scopeStack.length > 0) { + throw new Error("Expected empty scope stack after processing file."); + } +} + +function markShadowedForScope(scope, tokens, name) { + for (let i = scope.startTokenIndex; i < scope.endTokenIndex; i++) { + const token = tokens.tokens[i]; + if (token.type === _types.TokenType.name && tokens.identifierNameForToken(token) === name) { + token.shadowsGlobal = true; + } + } +} diff --git a/node_modules/sucrase/dist/identifyShadowedGlobals.mjs b/node_modules/sucrase/dist/identifyShadowedGlobals.mjs new file mode 100644 index 00000000..6ac3ae8e --- /dev/null +++ b/node_modules/sucrase/dist/identifyShadowedGlobals.mjs @@ -0,0 +1,94 @@ +import { + isBlockScopedDeclaration, + isFunctionScopedDeclaration, + isNonTopLevelDeclaration, +} from "./parser/tokenizer"; + +import {TokenType as tt} from "./parser/tokenizer/types"; + + +/** + * Traverse the given tokens and modify them if necessary to indicate that some names shadow global + * variables. + */ +export default function identifyShadowedGlobals( + tokens, + scopes, + globalNames, +) { + if (!hasShadowedGlobals(tokens, globalNames)) { + return; + } + markShadowedGlobals(tokens, scopes, globalNames); +} + +/** + * We can do a fast up-front check to see if there are any declarations to global names. If not, + * then there's no point in computing scope assignments. + */ +// Exported for testing. 
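Concretely, the shadowing this pass is looking for (a sketch; `foo` is assumed to be one of the global names supplied by the import processor):

// import {foo} from "./foo";
// function f(foo) {        // the parameter declares a new `foo` in this function scope
//   return foo;            // these `foo` tokens get token.shadowsGlobal = true
// }
//
// markShadowedForScope flags every matching name token in the enclosing scope, so that
// later passes can leave the shadowed usages alone instead of rewriting them to _foo.foo.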
+export function hasShadowedGlobals(tokens, globalNames) { + for (const token of tokens.tokens) { + if ( + token.type === tt.name && + isNonTopLevelDeclaration(token) && + globalNames.has(tokens.identifierNameForToken(token)) + ) { + return true; + } + } + return false; +} + +function markShadowedGlobals( + tokens, + scopes, + globalNames, +) { + const scopeStack = []; + let scopeIndex = scopes.length - 1; + // Scopes were generated at completion time, so they're sorted by end index, so we can maintain a + // good stack by going backwards through them. + for (let i = tokens.tokens.length - 1; ; i--) { + while (scopeStack.length > 0 && scopeStack[scopeStack.length - 1].startTokenIndex === i + 1) { + scopeStack.pop(); + } + while (scopeIndex >= 0 && scopes[scopeIndex].endTokenIndex === i + 1) { + scopeStack.push(scopes[scopeIndex]); + scopeIndex--; + } + // Process scopes after the last iteration so we can make sure we pop all of them. + if (i < 0) { + break; + } + + const token = tokens.tokens[i]; + const name = tokens.identifierNameForToken(token); + if (scopeStack.length > 1 && token.type === tt.name && globalNames.has(name)) { + if (isBlockScopedDeclaration(token)) { + markShadowedForScope(scopeStack[scopeStack.length - 1], tokens, name); + } else if (isFunctionScopedDeclaration(token)) { + let stackIndex = scopeStack.length - 1; + while (stackIndex > 0 && !scopeStack[stackIndex].isFunctionScope) { + stackIndex--; + } + if (stackIndex < 0) { + throw new Error("Did not find parent function scope."); + } + markShadowedForScope(scopeStack[stackIndex], tokens, name); + } + } + } + if (scopeStack.length > 0) { + throw new Error("Expected empty scope stack after processing file."); + } +} + +function markShadowedForScope(scope, tokens, name) { + for (let i = scope.startTokenIndex; i < scope.endTokenIndex; i++) { + const token = tokens.tokens[i]; + if (token.type === tt.name && tokens.identifierNameForToken(token) === name) { + token.shadowsGlobal = true; + } + } +} diff --git a/node_modules/sucrase/dist/index.d.ts b/node_modules/sucrase/dist/index.d.ts new file mode 100644 index 00000000..d6bd044c --- /dev/null +++ b/node_modules/sucrase/dist/index.d.ts @@ -0,0 +1,63 @@ +import CJSImportProcessor from "./CJSImportProcessor"; +import { RawSourceMap } from "./computeSourceMap"; +import NameManager from "./NameManager"; +import { Scope } from "./parser/tokenizer/state"; +import TokenProcessor from "./TokenProcessor"; +export declare type Transform = "jsx" | "typescript" | "flow" | "imports" | "react-hot-loader"; +export interface SourceMapOptions { + /** + * The name to use in the "file" field of the source map. This should be the name of the compiled + * file. + */ + compiledFilename: string; +} +export interface Options { + transforms: Array; + /** + * If specified, function name to use in place of React.createClass when compiling JSX. + */ + jsxPragma?: string; + /** + * If specified, function name to use in place of React.Fragment when compiling JSX. + */ + jsxFragmentPragma?: string; + /** + * If true, replicate the import behavior of TypeScript's esModuleInterop: false. + */ + enableLegacyTypeScriptModuleInterop?: boolean; + /** + * If true, replicate the import behavior Babel 5 and babel-plugin-add-module-exports. + */ + enableLegacyBabel5ModuleInterop?: boolean; + /** + * If specified, we also return a RawSourceMap object alongside the code. Currently, source maps + * simply map each line to the original line without any mappings within lines, since Sucrase + * preserves line numbers. 
filePath must be specified if this option is enabled. + */ + sourceMapOptions?: SourceMapOptions; + /** + * File path to use in error messages, React display names, and source maps. + */ + filePath?: string; + /** + * If specified, omit any development-specific code in the output. + */ + production?: boolean; +} +export interface TransformResult { + code: string; + sourceMap?: RawSourceMap; +} +export interface SucraseContext { + tokenProcessor: TokenProcessor; + scopes: Array; + nameManager: NameManager; + importProcessor: CJSImportProcessor | null; +} +export declare function getVersion(): string; +export declare function transform(code: string, options: Options): TransformResult; +/** + * Return a string representation of the sucrase tokens, mostly useful for + * diagnostic purposes. + */ +export declare function getFormattedTokens(code: string, options: Options): string; diff --git a/node_modules/sucrase/dist/index.js b/node_modules/sucrase/dist/index.js new file mode 100644 index 00000000..fcae0561 --- /dev/null +++ b/node_modules/sucrase/dist/index.js @@ -0,0 +1,152 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _CJSImportProcessor = require('./CJSImportProcessor'); var _CJSImportProcessor2 = _interopRequireDefault(_CJSImportProcessor); +var _computeSourceMap = require('./computeSourceMap'); var _computeSourceMap2 = _interopRequireDefault(_computeSourceMap); +var _identifyShadowedGlobals = require('./identifyShadowedGlobals'); var _identifyShadowedGlobals2 = _interopRequireDefault(_identifyShadowedGlobals); +var _NameManager = require('./NameManager'); var _NameManager2 = _interopRequireDefault(_NameManager); +var _parser = require('./parser'); + +var _TokenProcessor = require('./TokenProcessor'); var _TokenProcessor2 = _interopRequireDefault(_TokenProcessor); +var _RootTransformer = require('./transformers/RootTransformer'); var _RootTransformer2 = _interopRequireDefault(_RootTransformer); +var _formatTokens = require('./util/formatTokens'); var _formatTokens2 = _interopRequireDefault(_formatTokens); +var _getTSImportedNames = require('./util/getTSImportedNames'); var _getTSImportedNames2 = _interopRequireDefault(_getTSImportedNames); + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + function getVersion() { + // eslint-disable-next-line + return require("../package.json").version; +} exports.getVersion = getVersion; + + function transform(code, options) { + try { + const sucraseContext = getSucraseContext(code, options); + const transformer = new (0, _RootTransformer2.default)( + sucraseContext, + options.transforms, + Boolean(options.enableLegacyBabel5ModuleInterop), + options, + ); + let result = {code: transformer.transform()}; + if (options.sourceMapOptions) { + if (!options.filePath) { + throw new Error("filePath must be specified when generating a source map."); + } + result = { + ...result, + sourceMap: _computeSourceMap2.default.call(void 0, result.code, options.filePath, options.sourceMapOptions), + }; + } + return result; + } catch (e) { + if (options.filePath) { + e.message = `Error transforming ${options.filePath}: ${e.message}`; + } + throw e; + } +} exports.transform = transform; + +/** + * Return a string representation of the sucrase tokens, mostly useful for + * diagnostic purposes. 
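 *
 * Editor's note (not part of the sucrase sources): for orientation, the public
 * entry points declared in index.d.ts above are typically consumed like this
 * minimal sketch; the sample source string and transform list are illustrative:
 *
 *   const {transform} = require("sucrase");
 *   const {code} = transform("const n: number = 1;", {
 *     transforms: ["typescript", "imports"],
 *   });
 *   // `code` is plain CommonJS JavaScript with the type annotation stripped.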
+ */ + function getFormattedTokens(code, options) { + const tokens = getSucraseContext(code, options).tokenProcessor.tokens; + return _formatTokens2.default.call(void 0, code, tokens); +} exports.getFormattedTokens = getFormattedTokens; + +/** + * Call into the parser/tokenizer and do some further preprocessing: + * - Come up with a set of used names so that we can assign new names. + * - Preprocess all import/export statements so we know which globals we are interested in. + * - Compute situations where any of those globals are shadowed. + * + * In the future, some of these preprocessing steps can be skipped based on what actual work is + * being done. + */ +function getSucraseContext(code, options) { + const isJSXEnabled = options.transforms.includes("jsx"); + const isTypeScriptEnabled = options.transforms.includes("typescript"); + const isFlowEnabled = options.transforms.includes("flow"); + const file = _parser.parse.call(void 0, code, isJSXEnabled, isTypeScriptEnabled, isFlowEnabled); + const tokens = file.tokens; + const scopes = file.scopes; + + const tokenProcessor = new (0, _TokenProcessor2.default)(code, tokens, isFlowEnabled); + const nameManager = new (0, _NameManager2.default)(tokenProcessor); + nameManager.preprocessNames(); + const enableLegacyTypeScriptModuleInterop = Boolean(options.enableLegacyTypeScriptModuleInterop); + + let importProcessor = null; + if (options.transforms.includes("imports")) { + importProcessor = new (0, _CJSImportProcessor2.default)( + nameManager, + tokenProcessor, + enableLegacyTypeScriptModuleInterop, + options, + options.transforms.includes("typescript"), + ); + importProcessor.preprocessTokens(); + // We need to mark shadowed globals after processing imports so we know that the globals are, + // but before type-only import pruning, since that relies on shadowing information. 
+ _identifyShadowedGlobals2.default.call(void 0, tokenProcessor, scopes, importProcessor.getGlobalNames()); + if (options.transforms.includes("typescript")) { + importProcessor.pruneTypeOnlyImports(); + } + } else if (options.transforms.includes("typescript")) { + _identifyShadowedGlobals2.default.call(void 0, tokenProcessor, scopes, _getTSImportedNames2.default.call(void 0, tokenProcessor)); + } + return {tokenProcessor, scopes, nameManager, importProcessor}; +} diff --git a/node_modules/sucrase/dist/index.mjs b/node_modules/sucrase/dist/index.mjs new file mode 100644 index 00000000..04e42bca --- /dev/null +++ b/node_modules/sucrase/dist/index.mjs @@ -0,0 +1,152 @@ +import CJSImportProcessor from "./CJSImportProcessor"; +import computeSourceMap, {} from "./computeSourceMap"; +import identifyShadowedGlobals from "./identifyShadowedGlobals"; +import NameManager from "./NameManager"; +import {parse} from "./parser"; + +import TokenProcessor from "./TokenProcessor"; +import RootTransformer from "./transformers/RootTransformer"; +import formatTokens from "./util/formatTokens"; +import getTSImportedNames from "./util/getTSImportedNames"; + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +export function getVersion() { + // eslint-disable-next-line + return require("../package.json").version; +} + +export function transform(code, options) { + try { + const sucraseContext = getSucraseContext(code, options); + const transformer = new RootTransformer( + sucraseContext, + options.transforms, + Boolean(options.enableLegacyBabel5ModuleInterop), + options, + ); + let result = {code: transformer.transform()}; + if (options.sourceMapOptions) { + if (!options.filePath) { + throw new Error("filePath must be specified when generating a source map."); + } + result = { + ...result, + sourceMap: computeSourceMap(result.code, options.filePath, options.sourceMapOptions), + }; + } + return result; + } catch (e) { + if (options.filePath) { + e.message = `Error transforming ${options.filePath}: ${e.message}`; + } + throw e; + } +} + +/** + * Return a string representation of the sucrase tokens, mostly useful for + * diagnostic purposes. + */ +export function getFormattedTokens(code, options) { + const tokens = getSucraseContext(code, options).tokenProcessor.tokens; + return formatTokens(code, tokens); +} + +/** + * Call into the parser/tokenizer and do some further preprocessing: + * - Come up with a set of used names so that we can assign new names. + * - Preprocess all import/export statements so we know which globals we are interested in. + * - Compute situations where any of those globals are shadowed. + * + * In the future, some of these preprocessing steps can be skipped based on what actual work is + * being done. 
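 *
 * Editor's note (not part of the sucrase sources): mapped onto the calls
 * below, the preprocessing steps run in this order:
 *   1. parse()                                -> tokens and scopes
 *   2. new TokenProcessor / NameManager.preprocessNames()
 *   3. CJSImportProcessor.preprocessTokens()  (only with the "imports" transform)
 *   4. identifyShadowedGlobals()              -> marks tokens shadowing tracked globals
 *   5. pruneTypeOnlyImports()                 (TypeScript only; relies on step 4)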
+ */ +function getSucraseContext(code, options) { + const isJSXEnabled = options.transforms.includes("jsx"); + const isTypeScriptEnabled = options.transforms.includes("typescript"); + const isFlowEnabled = options.transforms.includes("flow"); + const file = parse(code, isJSXEnabled, isTypeScriptEnabled, isFlowEnabled); + const tokens = file.tokens; + const scopes = file.scopes; + + const tokenProcessor = new TokenProcessor(code, tokens, isFlowEnabled); + const nameManager = new NameManager(tokenProcessor); + nameManager.preprocessNames(); + const enableLegacyTypeScriptModuleInterop = Boolean(options.enableLegacyTypeScriptModuleInterop); + + let importProcessor = null; + if (options.transforms.includes("imports")) { + importProcessor = new CJSImportProcessor( + nameManager, + tokenProcessor, + enableLegacyTypeScriptModuleInterop, + options, + options.transforms.includes("typescript"), + ); + importProcessor.preprocessTokens(); + // We need to mark shadowed globals after processing imports so we know that the globals are, + // but before type-only import pruning, since that relies on shadowing information. + identifyShadowedGlobals(tokenProcessor, scopes, importProcessor.getGlobalNames()); + if (options.transforms.includes("typescript")) { + importProcessor.pruneTypeOnlyImports(); + } + } else if (options.transforms.includes("typescript")) { + identifyShadowedGlobals(tokenProcessor, scopes, getTSImportedNames(tokenProcessor)); + } + return {tokenProcessor, scopes, nameManager, importProcessor}; +} diff --git a/node_modules/sucrase/dist/parser/index.d.ts b/node_modules/sucrase/dist/parser/index.d.ts new file mode 100644 index 00000000..181f5227 --- /dev/null +++ b/node_modules/sucrase/dist/parser/index.d.ts @@ -0,0 +1,8 @@ +import { Token } from "./tokenizer/index"; +import { Scope } from "./tokenizer/state"; +export declare class File { + tokens: Array; + scopes: Array; + constructor(tokens: Array, scopes: Array); +} +export declare function parse(input: string, isJSXEnabled: boolean, isTypeScriptEnabled: boolean, isFlowEnabled: boolean): File; diff --git a/node_modules/sucrase/dist/parser/index.js b/node_modules/sucrase/dist/parser/index.js new file mode 100644 index 00000000..a8a81c38 --- /dev/null +++ b/node_modules/sucrase/dist/parser/index.js @@ -0,0 +1,31 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + +var _base = require('./traverser/base'); +var _index3 = require('./traverser/index'); + + class File { + + + + constructor(tokens, scopes) { + this.tokens = tokens; + this.scopes = scopes; + } +} exports.File = File; + + function parse( + input, + isJSXEnabled, + isTypeScriptEnabled, + isFlowEnabled, +) { + if (isFlowEnabled && isTypeScriptEnabled) { + throw new Error("Cannot combine flow and typescript plugins."); + } + _base.initParser.call(void 0, input, isJSXEnabled, isTypeScriptEnabled, isFlowEnabled); + const result = _index3.parseFile.call(void 0, ); + if (_base.state.error) { + throw _base.augmentError.call(void 0, _base.state.error); + } + return result; +} exports.parse = parse; diff --git a/node_modules/sucrase/dist/parser/index.mjs b/node_modules/sucrase/dist/parser/index.mjs new file mode 100644 index 00000000..5074ae44 --- /dev/null +++ b/node_modules/sucrase/dist/parser/index.mjs @@ -0,0 +1,31 @@ + + +import {augmentError, initParser, state} from "./traverser/base"; +import {parseFile} from "./traverser/index"; + +export class File { + + + + constructor(tokens, scopes) { + this.tokens = tokens; + this.scopes = scopes; + } +} + +export 
function parse( + input, + isJSXEnabled, + isTypeScriptEnabled, + isFlowEnabled, +) { + if (isFlowEnabled && isTypeScriptEnabled) { + throw new Error("Cannot combine flow and typescript plugins."); + } + initParser(input, isJSXEnabled, isTypeScriptEnabled, isFlowEnabled); + const result = parseFile(); + if (state.error) { + throw augmentError(state.error); + } + return result; +} diff --git a/node_modules/sucrase/dist/parser/plugins/flow.d.ts b/node_modules/sucrase/dist/parser/plugins/flow.d.ts new file mode 100644 index 00000000..f1d55bae --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/flow.d.ts @@ -0,0 +1,26 @@ +import { ContextualKeyword } from "../tokenizer/keywords"; +import { StopState } from "../traverser/expression"; +export declare function flowParseTypeParameterDeclaration(): void; +export declare function flowParseTypeAnnotation(): void; +export declare function flowParseVariance(): void; +export declare function flowParseFunctionBodyAndFinish(funcContextId: number): void; +export declare function flowParseSubscript(startPos: number, noCalls: boolean, stopState: StopState): void; +export declare function flowStartParseNewArguments(): void; +export declare function flowTryParseStatement(): boolean; +export declare function flowParseIdentifierStatement(contextualKeyword: ContextualKeyword): void; +export declare function flowShouldParseExportDeclaration(): boolean; +export declare function flowShouldDisallowExportDefaultSpecifier(): boolean; +export declare function flowParseExportDeclaration(): void; +export declare function flowShouldParseExportStar(): boolean; +export declare function flowParseExportStar(): void; +export declare function flowAfterParseClassSuper(hasSuper: boolean): void; +export declare function flowStartParseObjPropValue(): void; +export declare function flowParseAssignableListItemTypes(): void; +export declare function flowStartParseImportSpecifiers(): void; +export declare function flowParseImportSpecifier(): void; +export declare function flowStartParseFunctionParams(): void; +export declare function flowAfterParseVarHead(): void; +export declare function flowStartParseAsyncArrowFromCallExpression(): void; +export declare function flowParseMaybeAssign(noIn: boolean, isWithinParens: boolean): boolean; +export declare function flowParseArrow(): boolean; +export declare function flowParseSubscripts(startPos: number, noCalls?: boolean): void; diff --git a/node_modules/sucrase/dist/parser/plugins/flow.js b/node_modules/sucrase/dist/parser/plugins/flow.js new file mode 100644 index 00000000..379a3461 --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/flow.js @@ -0,0 +1,1047 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});/* eslint max-len: 0 */ + + + + + + + + + + +var _index = require('../tokenizer/index'); +var _keywords = require('../tokenizer/keywords'); +var _types = require('../tokenizer/types'); +var _base = require('../traverser/base'); + + + + + + + + + + + + + +var _expression = require('../traverser/expression'); + + + + + + + + +var _statement = require('../traverser/statement'); + + + + + + + + + +var _util = require('../traverser/util'); + +function isMaybeDefaultImport(lookahead) { + return ( + (lookahead.type === _types.TokenType.name || !!(lookahead.type & _types.TokenType.IS_KEYWORD)) && + lookahead.contextualKeyword !== _keywords.ContextualKeyword._from + ); +} + +function flowParseTypeInitialiser(tok) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _util.expect.call(void 0, 
tok || _types.TokenType.colon); + flowParseType(); + _index.popTypeContext.call(void 0, oldIsType); +} + +function flowParsePredicate() { + _util.expect.call(void 0, _types.TokenType.modulo); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._checks); + if (_index.eat.call(void 0, _types.TokenType.parenL)) { + _expression.parseExpression.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.parenR); + } +} + +function flowParseTypeAndPredicateInitialiser() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _util.expect.call(void 0, _types.TokenType.colon); + if (_index.match.call(void 0, _types.TokenType.modulo)) { + flowParsePredicate(); + } else { + flowParseType(); + if (_index.match.call(void 0, _types.TokenType.modulo)) { + flowParsePredicate(); + } + } + _index.popTypeContext.call(void 0, oldIsType); +} + +function flowParseDeclareClass() { + _index.next.call(void 0, ); + flowParseInterfaceish(/* isClass */ true); +} + +function flowParseDeclareFunction() { + _index.next.call(void 0, ); + _expression.parseIdentifier.call(void 0, ); + + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + _util.expect.call(void 0, _types.TokenType.parenL); + flowParseFunctionTypeParams(); + _util.expect.call(void 0, _types.TokenType.parenR); + + flowParseTypeAndPredicateInitialiser(); + + _util.semicolon.call(void 0, ); +} + +function flowParseDeclare() { + if (_index.match.call(void 0, _types.TokenType._class)) { + flowParseDeclareClass(); + } else if (_index.match.call(void 0, _types.TokenType._function)) { + flowParseDeclareFunction(); + } else if (_index.match.call(void 0, _types.TokenType._var)) { + flowParseDeclareVariable(); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._module)) { + if (_index.lookaheadType.call(void 0, ) === _types.TokenType.dot) { + flowParseDeclareModuleExports(); + } else { + flowParseDeclareModule(); + } + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._type)) { + flowParseDeclareTypeAlias(); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._opaque)) { + flowParseDeclareOpaqueType(); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._interface)) { + flowParseDeclareInterface(); + } else if (_index.match.call(void 0, _types.TokenType._export)) { + flowParseDeclareExportDeclaration(); + } else { + _util.unexpected.call(void 0, ); + } +} + +function flowParseDeclareVariable() { + _index.next.call(void 0, ); + flowParseTypeAnnotatableIdentifier(); + _util.semicolon.call(void 0, ); +} + +function flowParseDeclareModule() { + _index.next.call(void 0, ); + + if (_index.match.call(void 0, _types.TokenType.string)) { + _expression.parseExprAtom.call(void 0, ); + } else { + _expression.parseIdentifier.call(void 0, ); + } + + _util.expect.call(void 0, _types.TokenType.braceL); + while (!_index.match.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + if (_index.match.call(void 0, _types.TokenType._import)) { + _index.next.call(void 0, ); + _statement.parseImport.call(void 0, ); + } else { + _util.unexpected.call(void 0, ); + } + } + _util.expect.call(void 0, _types.TokenType.braceR); +} + +function flowParseDeclareExportDeclaration() { + _util.expect.call(void 0, _types.TokenType._export); + + if (_index.eat.call(void 0, _types.TokenType._default)) { + if (_index.match.call(void 0, _types.TokenType._function) || _index.match.call(void 0, _types.TokenType._class)) { + // declare export 
default class ... + // declare export default function ... + flowParseDeclare(); + } else { + // declare export default [type]; + flowParseType(); + _util.semicolon.call(void 0, ); + } + } else if ( + _index.match.call(void 0, _types.TokenType._var) || // declare export var ... + _index.match.call(void 0, _types.TokenType._function) || // declare export function ... + _index.match.call(void 0, _types.TokenType._class) || // declare export class ... + _util.isContextual.call(void 0, _keywords.ContextualKeyword._opaque) // declare export opaque .. + ) { + flowParseDeclare(); + } else if ( + _index.match.call(void 0, _types.TokenType.star) || // declare export * from '' + _index.match.call(void 0, _types.TokenType.braceL) || // declare export {} ... + _util.isContextual.call(void 0, _keywords.ContextualKeyword._interface) || // declare export interface ... + _util.isContextual.call(void 0, _keywords.ContextualKeyword._type) || // declare export type ... + _util.isContextual.call(void 0, _keywords.ContextualKeyword._opaque) // declare export opaque type ... + ) { + _statement.parseExport.call(void 0, ); + } else { + _util.unexpected.call(void 0, ); + } +} + +function flowParseDeclareModuleExports() { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._module); + _util.expect.call(void 0, _types.TokenType.dot); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._exports); + flowParseTypeAnnotation(); + _util.semicolon.call(void 0, ); +} + +function flowParseDeclareTypeAlias() { + _index.next.call(void 0, ); + flowParseTypeAlias(); +} + +function flowParseDeclareOpaqueType() { + _index.next.call(void 0, ); + flowParseOpaqueType(true); +} + +function flowParseDeclareInterface() { + _index.next.call(void 0, ); + flowParseInterfaceish(); +} + +// Interfaces + +function flowParseInterfaceish(isClass = false) { + flowParseRestrictedIdentifier(); + + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + if (_index.eat.call(void 0, _types.TokenType._extends)) { + do { + flowParseInterfaceExtends(); + } while (!isClass && _index.eat.call(void 0, _types.TokenType.comma)); + } + + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._mixins)) { + _index.next.call(void 0, ); + do { + flowParseInterfaceExtends(); + } while (_index.eat.call(void 0, _types.TokenType.comma)); + } + + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._implements)) { + _index.next.call(void 0, ); + do { + flowParseInterfaceExtends(); + } while (_index.eat.call(void 0, _types.TokenType.comma)); + } + + flowParseObjectType(isClass, false, isClass); +} + +function flowParseInterfaceExtends() { + flowParseQualifiedTypeIdentifier(false); + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterInstantiation(); + } +} + +function flowParseInterface() { + flowParseInterfaceish(); +} + +function flowParseRestrictedIdentifier() { + _expression.parseIdentifier.call(void 0, ); +} + +function flowParseTypeAlias() { + flowParseRestrictedIdentifier(); + + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + flowParseTypeInitialiser(_types.TokenType.eq); + _util.semicolon.call(void 0, ); +} + +function flowParseOpaqueType(declare) { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._type); + flowParseRestrictedIdentifier(); + + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + // 
Parse the supertype + if (_index.match.call(void 0, _types.TokenType.colon)) { + flowParseTypeInitialiser(_types.TokenType.colon); + } + + if (!declare) { + flowParseTypeInitialiser(_types.TokenType.eq); + } + _util.semicolon.call(void 0, ); +} + +function flowParseTypeParameter() { + flowParseVariance(); + flowParseTypeAnnotatableIdentifier(); + + if (_index.eat.call(void 0, _types.TokenType.eq)) { + flowParseType(); + } +} + + function flowParseTypeParameterDeclaration() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + // istanbul ignore else: this condition is already checked at all call sites + if (_index.match.call(void 0, _types.TokenType.lessThan) || _index.match.call(void 0, _types.TokenType.typeParameterStart)) { + _index.next.call(void 0, ); + } else { + _util.unexpected.call(void 0, ); + } + + do { + flowParseTypeParameter(); + if (!_index.match.call(void 0, _types.TokenType.greaterThan)) { + _util.expect.call(void 0, _types.TokenType.comma); + } + } while (!_index.match.call(void 0, _types.TokenType.greaterThan) && !_base.state.error); + _util.expect.call(void 0, _types.TokenType.greaterThan); + _index.popTypeContext.call(void 0, oldIsType); +} exports.flowParseTypeParameterDeclaration = flowParseTypeParameterDeclaration; + +function flowParseTypeParameterInstantiation() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _util.expect.call(void 0, _types.TokenType.lessThan); + while (!_index.match.call(void 0, _types.TokenType.greaterThan) && !_base.state.error) { + flowParseType(); + if (!_index.match.call(void 0, _types.TokenType.greaterThan)) { + _util.expect.call(void 0, _types.TokenType.comma); + } + } + _util.expect.call(void 0, _types.TokenType.greaterThan); + _index.popTypeContext.call(void 0, oldIsType); +} + +function flowParseInterfaceType() { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._interface); + if (_index.eat.call(void 0, _types.TokenType._extends)) { + do { + flowParseInterfaceExtends(); + } while (_index.eat.call(void 0, _types.TokenType.comma)); + } + flowParseObjectType(false, false, false); +} + +function flowParseObjectPropertyKey() { + if (_index.match.call(void 0, _types.TokenType.num) || _index.match.call(void 0, _types.TokenType.string)) { + _expression.parseExprAtom.call(void 0, ); + } else { + _expression.parseIdentifier.call(void 0, ); + } +} + +function flowParseObjectTypeIndexer() { + // Note: bracketL has already been consumed + if (_index.lookaheadType.call(void 0, ) === _types.TokenType.colon) { + flowParseObjectPropertyKey(); + flowParseTypeInitialiser(); + } else { + flowParseType(); + } + _util.expect.call(void 0, _types.TokenType.bracketR); + flowParseTypeInitialiser(); +} + +function flowParseObjectTypeInternalSlot() { + // Note: both bracketL have already been consumed + flowParseObjectPropertyKey(); + _util.expect.call(void 0, _types.TokenType.bracketR); + _util.expect.call(void 0, _types.TokenType.bracketR); + if (_index.match.call(void 0, _types.TokenType.lessThan) || _index.match.call(void 0, _types.TokenType.parenL)) { + flowParseObjectTypeMethodish(); + } else { + _index.eat.call(void 0, _types.TokenType.question); + flowParseTypeInitialiser(); + } +} + +function flowParseObjectTypeMethodish() { + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + _util.expect.call(void 0, _types.TokenType.parenL); + while (!_index.match.call(void 0, _types.TokenType.parenR) && !_index.match.call(void 0, _types.TokenType.ellipsis) && 
!_base.state.error) { + flowParseFunctionTypeParam(); + if (!_index.match.call(void 0, _types.TokenType.parenR)) { + _util.expect.call(void 0, _types.TokenType.comma); + } + } + + if (_index.eat.call(void 0, _types.TokenType.ellipsis)) { + flowParseFunctionTypeParam(); + } + _util.expect.call(void 0, _types.TokenType.parenR); + flowParseTypeInitialiser(); +} + +function flowParseObjectTypeCallProperty() { + flowParseObjectTypeMethodish(); +} + +function flowParseObjectType(allowStatic, allowExact, allowProto) { + let endDelim; + if (allowExact && _index.match.call(void 0, _types.TokenType.braceBarL)) { + _util.expect.call(void 0, _types.TokenType.braceBarL); + endDelim = _types.TokenType.braceBarR; + } else { + _util.expect.call(void 0, _types.TokenType.braceL); + endDelim = _types.TokenType.braceR; + } + + while (!_index.match.call(void 0, endDelim) && !_base.state.error) { + if (allowProto && _util.isContextual.call(void 0, _keywords.ContextualKeyword._proto)) { + const lookahead = _index.lookaheadType.call(void 0, ); + if (lookahead !== _types.TokenType.colon && lookahead !== _types.TokenType.question) { + _index.next.call(void 0, ); + allowStatic = false; + } + } + if (allowStatic && _util.isContextual.call(void 0, _keywords.ContextualKeyword._static)) { + const lookahead = _index.lookaheadType.call(void 0, ); + if (lookahead !== _types.TokenType.colon && lookahead !== _types.TokenType.question) { + _index.next.call(void 0, ); + } + } + + flowParseVariance(); + + if (_index.eat.call(void 0, _types.TokenType.bracketL)) { + if (_index.eat.call(void 0, _types.TokenType.bracketL)) { + flowParseObjectTypeInternalSlot(); + } else { + flowParseObjectTypeIndexer(); + } + } else if (_index.match.call(void 0, _types.TokenType.parenL) || _index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseObjectTypeCallProperty(); + } else { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._get) || _util.isContextual.call(void 0, _keywords.ContextualKeyword._set)) { + const lookahead = _index.lookaheadType.call(void 0, ); + if (lookahead === _types.TokenType.name || lookahead === _types.TokenType.string || lookahead === _types.TokenType.num) { + _index.next.call(void 0, ); + } + } + + flowParseObjectTypeProperty(); + } + + flowObjectTypeSemicolon(); + } + + _util.expect.call(void 0, endDelim); +} + +function flowParseObjectTypeProperty() { + if (_index.match.call(void 0, _types.TokenType.ellipsis)) { + _util.expect.call(void 0, _types.TokenType.ellipsis); + if (!_index.eat.call(void 0, _types.TokenType.comma)) { + _index.eat.call(void 0, _types.TokenType.semi); + } + // Explicit inexact object syntax. 
+ if (_index.match.call(void 0, _types.TokenType.braceR)) { + return; + } + flowParseType(); + } else { + flowParseObjectPropertyKey(); + if (_index.match.call(void 0, _types.TokenType.lessThan) || _index.match.call(void 0, _types.TokenType.parenL)) { + // This is a method property + flowParseObjectTypeMethodish(); + } else { + _index.eat.call(void 0, _types.TokenType.question); + flowParseTypeInitialiser(); + } + } +} + +function flowObjectTypeSemicolon() { + if (!_index.eat.call(void 0, _types.TokenType.semi) && !_index.eat.call(void 0, _types.TokenType.comma) && !_index.match.call(void 0, _types.TokenType.braceR) && !_index.match.call(void 0, _types.TokenType.braceBarR)) { + _util.unexpected.call(void 0, ); + } +} + +function flowParseQualifiedTypeIdentifier(initialIdAlreadyParsed) { + if (!initialIdAlreadyParsed) { + _expression.parseIdentifier.call(void 0, ); + } + while (_index.eat.call(void 0, _types.TokenType.dot)) { + _expression.parseIdentifier.call(void 0, ); + } +} + +function flowParseGenericType() { + flowParseQualifiedTypeIdentifier(true); + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterInstantiation(); + } +} + +function flowParseTypeofType() { + _util.expect.call(void 0, _types.TokenType._typeof); + flowParsePrimaryType(); +} + +function flowParseTupleType() { + _util.expect.call(void 0, _types.TokenType.bracketL); + // We allow trailing commas + while (_base.state.pos < _base.input.length && !_index.match.call(void 0, _types.TokenType.bracketR)) { + flowParseType(); + if (_index.match.call(void 0, _types.TokenType.bracketR)) { + break; + } + _util.expect.call(void 0, _types.TokenType.comma); + } + _util.expect.call(void 0, _types.TokenType.bracketR); +} + +function flowParseFunctionTypeParam() { + const lookahead = _index.lookaheadType.call(void 0, ); + if (lookahead === _types.TokenType.colon || lookahead === _types.TokenType.question) { + _expression.parseIdentifier.call(void 0, ); + _index.eat.call(void 0, _types.TokenType.question); + flowParseTypeInitialiser(); + } else { + flowParseType(); + } +} + +function flowParseFunctionTypeParams() { + while (!_index.match.call(void 0, _types.TokenType.parenR) && !_index.match.call(void 0, _types.TokenType.ellipsis) && !_base.state.error) { + flowParseFunctionTypeParam(); + if (!_index.match.call(void 0, _types.TokenType.parenR)) { + _util.expect.call(void 0, _types.TokenType.comma); + } + } + if (_index.eat.call(void 0, _types.TokenType.ellipsis)) { + flowParseFunctionTypeParam(); + } +} + +// The parsing of types roughly parallels the parsing of expressions, and +// primary types are kind of like primary expressions...they're the +// primitives with which other types are constructed. 
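// ---------------------------------------------------------------------------
// Editor's note (illustrative, not part of the sucrase sources): the type
// parsers below form a precedence chain, so a type such as `?Foo | Bar & Baz[]`
// is consumed roughly as union(intersection(prefix(postfix(primary)))):
//
//   flowParseType
//     -> flowParseUnionType             // splits on `|`
//       -> flowParseIntersectionType    // splits on `&`
//         -> flowParseAnonFunctionWithoutParens
//           -> flowParsePrefixType      // leading `?` (maybe types)
//             -> flowParsePostfixType   // trailing `[]` (array shorthand)
//               -> flowParsePrimaryType // names, literals, object/tuple/function types
// ---------------------------------------------------------------------------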
+function flowParsePrimaryType() { + let isGroupedType = false; + const oldNoAnonFunctionType = _base.state.noAnonFunctionType; + + switch (_base.state.type) { + case _types.TokenType.name: { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._interface)) { + flowParseInterfaceType(); + return; + } + _expression.parseIdentifier.call(void 0, ); + flowParseGenericType(); + return; + } + + case _types.TokenType.braceL: + flowParseObjectType(false, false, false); + return; + + case _types.TokenType.braceBarL: + flowParseObjectType(false, true, false); + return; + + case _types.TokenType.bracketL: + flowParseTupleType(); + return; + + case _types.TokenType.lessThan: + flowParseTypeParameterDeclaration(); + _util.expect.call(void 0, _types.TokenType.parenL); + flowParseFunctionTypeParams(); + _util.expect.call(void 0, _types.TokenType.parenR); + _util.expect.call(void 0, _types.TokenType.arrow); + flowParseType(); + return; + + case _types.TokenType.parenL: + _index.next.call(void 0, ); + + // Check to see if this is actually a grouped type + if (!_index.match.call(void 0, _types.TokenType.parenR) && !_index.match.call(void 0, _types.TokenType.ellipsis)) { + if (_index.match.call(void 0, _types.TokenType.name)) { + const token = _index.lookaheadType.call(void 0, ); + isGroupedType = token !== _types.TokenType.question && token !== _types.TokenType.colon; + } else { + isGroupedType = true; + } + } + + if (isGroupedType) { + _base.state.noAnonFunctionType = false; + flowParseType(); + _base.state.noAnonFunctionType = oldNoAnonFunctionType; + + // A `,` or a `) =>` means this is an anonymous function type + if ( + _base.state.noAnonFunctionType || + !(_index.match.call(void 0, _types.TokenType.comma) || (_index.match.call(void 0, _types.TokenType.parenR) && _index.lookaheadType.call(void 0, ) === _types.TokenType.arrow)) + ) { + _util.expect.call(void 0, _types.TokenType.parenR); + return; + } else { + // Eat a comma if there is one + _index.eat.call(void 0, _types.TokenType.comma); + } + } + + flowParseFunctionTypeParams(); + + _util.expect.call(void 0, _types.TokenType.parenR); + _util.expect.call(void 0, _types.TokenType.arrow); + flowParseType(); + return; + + case _types.TokenType.minus: + _index.next.call(void 0, ); + _expression.parseLiteral.call(void 0, ); + return; + + case _types.TokenType.string: + case _types.TokenType.num: + case _types.TokenType._true: + case _types.TokenType._false: + case _types.TokenType._null: + case _types.TokenType._this: + case _types.TokenType._void: + case _types.TokenType.star: + _index.next.call(void 0, ); + return; + + default: + if (_base.state.type === _types.TokenType._typeof) { + flowParseTypeofType(); + return; + } else if (_base.state.type & _types.TokenType.IS_KEYWORD) { + _index.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType.name; + return; + } + } + + _util.unexpected.call(void 0, ); +} + +function flowParsePostfixType() { + flowParsePrimaryType(); + while (!_util.canInsertSemicolon.call(void 0, ) && _index.match.call(void 0, _types.TokenType.bracketL)) { + _util.expect.call(void 0, _types.TokenType.bracketL); + _util.expect.call(void 0, _types.TokenType.bracketR); + } +} + +function flowParsePrefixType() { + if (_index.eat.call(void 0, _types.TokenType.question)) { + flowParsePrefixType(); + } else { + flowParsePostfixType(); + } +} + +function flowParseAnonFunctionWithoutParens() { + flowParsePrefixType(); + if (!_base.state.noAnonFunctionType && _index.eat.call(void 0, 
_types.TokenType.arrow)) { + flowParseType(); + } +} + +function flowParseIntersectionType() { + _index.eat.call(void 0, _types.TokenType.bitwiseAND); + flowParseAnonFunctionWithoutParens(); + while (_index.eat.call(void 0, _types.TokenType.bitwiseAND)) { + flowParseAnonFunctionWithoutParens(); + } +} + +function flowParseUnionType() { + _index.eat.call(void 0, _types.TokenType.bitwiseOR); + flowParseIntersectionType(); + while (_index.eat.call(void 0, _types.TokenType.bitwiseOR)) { + flowParseIntersectionType(); + } +} + +function flowParseType() { + flowParseUnionType(); +} + + function flowParseTypeAnnotation() { + flowParseTypeInitialiser(); +} exports.flowParseTypeAnnotation = flowParseTypeAnnotation; + +function flowParseTypeAnnotatableIdentifier() { + _expression.parseIdentifier.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.colon)) { + flowParseTypeAnnotation(); + } +} + + function flowParseVariance() { + if (_index.match.call(void 0, _types.TokenType.plus) || _index.match.call(void 0, _types.TokenType.minus)) { + _index.next.call(void 0, ); + } +} exports.flowParseVariance = flowParseVariance; + +// ================================== +// Overrides +// ================================== + + function flowParseFunctionBodyAndFinish(funcContextId) { + // For arrow functions, `parseArrow` handles the return type itself. + if (_index.match.call(void 0, _types.TokenType.colon)) { + flowParseTypeAndPredicateInitialiser(); + } + + _expression.parseFunctionBody.call(void 0, false, funcContextId); +} exports.flowParseFunctionBodyAndFinish = flowParseFunctionBodyAndFinish; + + function flowParseSubscript(startPos, noCalls, stopState) { + if (_index.match.call(void 0, _types.TokenType.questionDot) && _index.lookaheadType.call(void 0, ) === _types.TokenType.lessThan) { + if (noCalls) { + stopState.stop = true; + return; + } + _index.next.call(void 0, ); + flowParseTypeParameterInstantiation(); + _util.expect.call(void 0, _types.TokenType.parenL); + _expression.parseCallExpressionArguments.call(void 0, ); + return; + } else if (!noCalls && _index.match.call(void 0, _types.TokenType.lessThan)) { + const snapshot = _base.state.snapshot(); + flowParseTypeParameterInstantiation(); + _util.expect.call(void 0, _types.TokenType.parenL); + _expression.parseCallExpressionArguments.call(void 0, ); + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } else { + return; + } + } + _expression.baseParseSubscript.call(void 0, startPos, noCalls, stopState); +} exports.flowParseSubscript = flowParseSubscript; + + function flowStartParseNewArguments() { + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + const snapshot = _base.state.snapshot(); + flowParseTypeParameterInstantiation(); + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } + } +} exports.flowStartParseNewArguments = flowStartParseNewArguments; + +// interfaces + function flowTryParseStatement() { + if (_index.match.call(void 0, _types.TokenType.name) && _base.state.contextualKeyword === _keywords.ContextualKeyword._interface) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _index.next.call(void 0, ); + flowParseInterface(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } else { + return false; + } +} exports.flowTryParseStatement = flowTryParseStatement; + +// declares, interfaces and type aliases + function flowParseIdentifierStatement(contextualKeyword) { + if (contextualKeyword === _keywords.ContextualKeyword._declare) { + if ( + 
_index.match.call(void 0, _types.TokenType._class) || + _index.match.call(void 0, _types.TokenType.name) || + _index.match.call(void 0, _types.TokenType._function) || + _index.match.call(void 0, _types.TokenType._var) || + _index.match.call(void 0, _types.TokenType._export) + ) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + flowParseDeclare(); + _index.popTypeContext.call(void 0, oldIsType); + } + } else if (_index.match.call(void 0, _types.TokenType.name)) { + if (contextualKeyword === _keywords.ContextualKeyword._interface) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + flowParseInterface(); + _index.popTypeContext.call(void 0, oldIsType); + } else if (contextualKeyword === _keywords.ContextualKeyword._type) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + flowParseTypeAlias(); + _index.popTypeContext.call(void 0, oldIsType); + } else if (contextualKeyword === _keywords.ContextualKeyword._opaque) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + flowParseOpaqueType(false); + _index.popTypeContext.call(void 0, oldIsType); + } + } + _util.semicolon.call(void 0, ); +} exports.flowParseIdentifierStatement = flowParseIdentifierStatement; + +// export type + function flowShouldParseExportDeclaration() { + return ( + _util.isContextual.call(void 0, _keywords.ContextualKeyword._type) || + _util.isContextual.call(void 0, _keywords.ContextualKeyword._interface) || + _util.isContextual.call(void 0, _keywords.ContextualKeyword._opaque) + ); +} exports.flowShouldParseExportDeclaration = flowShouldParseExportDeclaration; + + function flowShouldDisallowExportDefaultSpecifier() { + return ( + _index.match.call(void 0, _types.TokenType.name) && + (_base.state.contextualKeyword === _keywords.ContextualKeyword._type || + _base.state.contextualKeyword === _keywords.ContextualKeyword._interface || + _base.state.contextualKeyword === _keywords.ContextualKeyword._opaque) + ); +} exports.flowShouldDisallowExportDefaultSpecifier = flowShouldDisallowExportDefaultSpecifier; + + function flowParseExportDeclaration() { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._type)) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + _index.next.call(void 0, ); + + if (_index.match.call(void 0, _types.TokenType.braceL)) { + // export type { foo, bar }; + _statement.parseExportSpecifiers.call(void 0, ); + _statement.parseExportFrom.call(void 0, ); + } else { + // export type Foo = Bar; + flowParseTypeAlias(); + } + _index.popTypeContext.call(void 0, oldIsType); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._opaque)) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + _index.next.call(void 0, ); + // export opaque type Foo = Bar; + flowParseOpaqueType(false); + _index.popTypeContext.call(void 0, oldIsType); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._interface)) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + _index.next.call(void 0, ); + flowParseInterface(); + _index.popTypeContext.call(void 0, oldIsType); + } else { + _statement.parseStatement.call(void 0, true); + } +} exports.flowParseExportDeclaration = flowParseExportDeclaration; + + function flowShouldParseExportStar() { + return _index.match.call(void 0, _types.TokenType.star) || (_util.isContextual.call(void 0, _keywords.ContextualKeyword._type) && _index.lookaheadType.call(void 0, ) === _types.TokenType.star); +} exports.flowShouldParseExportStar = flowShouldParseExportStar; + + function 
flowParseExportStar() { + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._type)) { + const oldIsType = _index.pushTypeContext.call(void 0, 2); + _statement.baseParseExportStar.call(void 0, ); + _index.popTypeContext.call(void 0, oldIsType); + } else { + _statement.baseParseExportStar.call(void 0, ); + } +} exports.flowParseExportStar = flowParseExportStar; + +// parse a the super class type parameters and implements + function flowAfterParseClassSuper(hasSuper) { + if (hasSuper && _index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterInstantiation(); + } + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._implements)) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _index.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._implements; + do { + flowParseRestrictedIdentifier(); + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterInstantiation(); + } + } while (_index.eat.call(void 0, _types.TokenType.comma)); + _index.popTypeContext.call(void 0, oldIsType); + } +} exports.flowAfterParseClassSuper = flowAfterParseClassSuper; + +// parse type parameters for object method shorthand + function flowStartParseObjPropValue() { + // method shorthand + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + if (!_index.match.call(void 0, _types.TokenType.parenL)) _util.unexpected.call(void 0, ); + } +} exports.flowStartParseObjPropValue = flowStartParseObjPropValue; + + function flowParseAssignableListItemTypes() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _index.eat.call(void 0, _types.TokenType.question); + if (_index.match.call(void 0, _types.TokenType.colon)) { + flowParseTypeAnnotation(); + } + _index.popTypeContext.call(void 0, oldIsType); +} exports.flowParseAssignableListItemTypes = flowParseAssignableListItemTypes; + +// parse typeof and type imports + function flowStartParseImportSpecifiers() { + if (_index.match.call(void 0, _types.TokenType._typeof) || _util.isContextual.call(void 0, _keywords.ContextualKeyword._type)) { + const lh = _index.lookaheadTypeAndKeyword.call(void 0, ); + if (isMaybeDefaultImport(lh) || lh.type === _types.TokenType.braceL || lh.type === _types.TokenType.star) { + _index.next.call(void 0, ); + } + } +} exports.flowStartParseImportSpecifiers = flowStartParseImportSpecifiers; + +// parse import-type/typeof shorthand + function flowParseImportSpecifier() { + const isTypeKeyword = + _base.state.contextualKeyword === _keywords.ContextualKeyword._type || _base.state.type === _types.TokenType._typeof; + if (isTypeKeyword) { + _index.next.call(void 0, ); + } else { + _expression.parseIdentifier.call(void 0, ); + } + + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._as) && !_util.isLookaheadContextual.call(void 0, _keywords.ContextualKeyword._as)) { + _expression.parseIdentifier.call(void 0, ); + if (isTypeKeyword && !_index.match.call(void 0, _types.TokenType.name) && !(_base.state.type & _types.TokenType.IS_KEYWORD)) { + // `import {type as ,` or `import {type as }` + } else { + // `import {type as foo` + _expression.parseIdentifier.call(void 0, ); + } + } else if (isTypeKeyword && (_index.match.call(void 0, _types.TokenType.name) || !!(_base.state.type & _types.TokenType.IS_KEYWORD))) { + // `import {type foo` + _expression.parseIdentifier.call(void 0, ); + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._as)) { + 
_expression.parseIdentifier.call(void 0, ); + } + } +} exports.flowParseImportSpecifier = flowParseImportSpecifier; + +// parse function type parameters - function foo() {} + function flowStartParseFunctionParams() { + // Originally this checked if the method is a getter/setter, but if it was, we'd crash soon + // anyway, so don't try to propagate that information. + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + flowParseTypeParameterDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + } +} exports.flowStartParseFunctionParams = flowStartParseFunctionParams; + +// parse flow type annotations on variable declarator heads - let foo: string = bar + function flowAfterParseVarHead() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + flowParseTypeAnnotation(); + } +} exports.flowAfterParseVarHead = flowAfterParseVarHead; + +// parse the return type of an async arrow function - let foo = (async (): number => {}); + function flowStartParseAsyncArrowFromCallExpression() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + const oldNoAnonFunctionType = _base.state.noAnonFunctionType; + _base.state.noAnonFunctionType = true; + flowParseTypeAnnotation(); + _base.state.noAnonFunctionType = oldNoAnonFunctionType; + } +} exports.flowStartParseAsyncArrowFromCallExpression = flowStartParseAsyncArrowFromCallExpression; + +// We need to support type parameter declarations for arrow functions. This +// is tricky. There are three situations we need to handle +// +// 1. This is either JSX or an arrow function. We'll try JSX first. If that +// fails, we'll try an arrow function. If that fails, we'll throw the JSX +// error. +// 2. This is an arrow function. We'll parse the type parameter declaration, +// parse the rest, make sure the rest is an arrow function, and go from +// there +// 3. This is neither. 
Just call the super method + function flowParseMaybeAssign(noIn, isWithinParens) { + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + const snapshot = _base.state.snapshot(); + let wasArrow = _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + _base.state.type = _types.TokenType.typeParameterStart; + } else { + return wasArrow; + } + + const oldIsType = _index.pushTypeContext.call(void 0, 0); + flowParseTypeParameterDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + wasArrow = _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + if (wasArrow) { + return true; + } + _util.unexpected.call(void 0, ); + } + + return _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); +} exports.flowParseMaybeAssign = flowParseMaybeAssign; + +// handle return types for arrow functions + function flowParseArrow() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + const snapshot = _base.state.snapshot(); + + const oldNoAnonFunctionType = _base.state.noAnonFunctionType; + _base.state.noAnonFunctionType = true; + flowParseTypeAndPredicateInitialiser(); + _base.state.noAnonFunctionType = oldNoAnonFunctionType; + + if (_util.canInsertSemicolon.call(void 0, )) _util.unexpected.call(void 0, ); + if (!_index.match.call(void 0, _types.TokenType.arrow)) _util.unexpected.call(void 0, ); + + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } + _index.popTypeContext.call(void 0, oldIsType); + } + return _index.eat.call(void 0, _types.TokenType.arrow); +} exports.flowParseArrow = flowParseArrow; + + function flowParseSubscripts(startPos, noCalls = false) { + if ( + _base.state.tokens[_base.state.tokens.length - 1].contextualKeyword === _keywords.ContextualKeyword._async && + _index.match.call(void 0, _types.TokenType.lessThan) + ) { + const snapshot = _base.state.snapshot(); + const wasArrow = parseAsyncArrowWithTypeParameters(); + if (wasArrow && !_base.state.error) { + return; + } + _base.state.restoreFromSnapshot(snapshot); + } + + _expression.baseParseSubscripts.call(void 0, startPos, noCalls); +} exports.flowParseSubscripts = flowParseSubscripts; + +// Returns true if there was an arrow function here. 
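// ---------------------------------------------------------------------------
// Editor's note (illustrative, not part of the sucrase sources): the ambiguity
// that flowParseMaybeAssign resolves above is between a generic arrow function
// and JSX; identifier names here are made up:
//
//   const identity = <T>(value: T): T => value;   // type parameters + arrow
//   const element  = <Thing>children</Thing>;     // JSX element
//
// It first attempts the base parse (which covers the JSX reading) against a
// snapshot of the state, and on error re-parses the `<...>` as a type
// parameter declaration followed by an arrow function.
// ---------------------------------------------------------------------------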
+function parseAsyncArrowWithTypeParameters() { + _base.state.scopeDepth++; + const startTokenIndex = _base.state.tokens.length; + _statement.parseFunctionParams.call(void 0, ); + if (!_expression.parseArrow.call(void 0, )) { + return false; + } + _expression.parseArrowExpression.call(void 0, startTokenIndex); + return true; +} diff --git a/node_modules/sucrase/dist/parser/plugins/flow.mjs b/node_modules/sucrase/dist/parser/plugins/flow.mjs new file mode 100644 index 00000000..0d470de8 --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/flow.mjs @@ -0,0 +1,1047 @@ +/* eslint max-len: 0 */ + +import { + eat, + lookaheadType, + lookaheadTypeAndKeyword, + match, + next, + popTypeContext, + pushTypeContext, + +} from "../tokenizer/index"; +import {ContextualKeyword} from "../tokenizer/keywords"; +import {TokenType, TokenType as tt} from "../tokenizer/types"; +import {input, state} from "../traverser/base"; +import { + baseParseMaybeAssign, + baseParseSubscript, + baseParseSubscripts, + parseArrow, + parseArrowExpression, + parseCallExpressionArguments, + parseExprAtom, + parseExpression, + parseFunctionBody, + parseIdentifier, + parseLiteral, + +} from "../traverser/expression"; +import { + baseParseExportStar, + parseExport, + parseExportFrom, + parseExportSpecifiers, + parseFunctionParams, + parseImport, + parseStatement, +} from "../traverser/statement"; +import { + canInsertSemicolon, + eatContextual, + expect, + expectContextual, + isContextual, + isLookaheadContextual, + semicolon, + unexpected, +} from "../traverser/util"; + +function isMaybeDefaultImport(lookahead) { + return ( + (lookahead.type === tt.name || !!(lookahead.type & TokenType.IS_KEYWORD)) && + lookahead.contextualKeyword !== ContextualKeyword._from + ); +} + +function flowParseTypeInitialiser(tok) { + const oldIsType = pushTypeContext(0); + expect(tok || tt.colon); + flowParseType(); + popTypeContext(oldIsType); +} + +function flowParsePredicate() { + expect(tt.modulo); + expectContextual(ContextualKeyword._checks); + if (eat(tt.parenL)) { + parseExpression(); + expect(tt.parenR); + } +} + +function flowParseTypeAndPredicateInitialiser() { + const oldIsType = pushTypeContext(0); + expect(tt.colon); + if (match(tt.modulo)) { + flowParsePredicate(); + } else { + flowParseType(); + if (match(tt.modulo)) { + flowParsePredicate(); + } + } + popTypeContext(oldIsType); +} + +function flowParseDeclareClass() { + next(); + flowParseInterfaceish(/* isClass */ true); +} + +function flowParseDeclareFunction() { + next(); + parseIdentifier(); + + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + expect(tt.parenL); + flowParseFunctionTypeParams(); + expect(tt.parenR); + + flowParseTypeAndPredicateInitialiser(); + + semicolon(); +} + +function flowParseDeclare() { + if (match(tt._class)) { + flowParseDeclareClass(); + } else if (match(tt._function)) { + flowParseDeclareFunction(); + } else if (match(tt._var)) { + flowParseDeclareVariable(); + } else if (isContextual(ContextualKeyword._module)) { + if (lookaheadType() === tt.dot) { + flowParseDeclareModuleExports(); + } else { + flowParseDeclareModule(); + } + } else if (isContextual(ContextualKeyword._type)) { + flowParseDeclareTypeAlias(); + } else if (isContextual(ContextualKeyword._opaque)) { + flowParseDeclareOpaqueType(); + } else if (isContextual(ContextualKeyword._interface)) { + flowParseDeclareInterface(); + } else if (match(tt._export)) { + flowParseDeclareExportDeclaration(); + } else { + unexpected(); + } +} + +function 
flowParseDeclareVariable() { + next(); + flowParseTypeAnnotatableIdentifier(); + semicolon(); +} + +function flowParseDeclareModule() { + next(); + + if (match(tt.string)) { + parseExprAtom(); + } else { + parseIdentifier(); + } + + expect(tt.braceL); + while (!match(tt.braceR) && !state.error) { + if (match(tt._import)) { + next(); + parseImport(); + } else { + unexpected(); + } + } + expect(tt.braceR); +} + +function flowParseDeclareExportDeclaration() { + expect(tt._export); + + if (eat(tt._default)) { + if (match(tt._function) || match(tt._class)) { + // declare export default class ... + // declare export default function ... + flowParseDeclare(); + } else { + // declare export default [type]; + flowParseType(); + semicolon(); + } + } else if ( + match(tt._var) || // declare export var ... + match(tt._function) || // declare export function ... + match(tt._class) || // declare export class ... + isContextual(ContextualKeyword._opaque) // declare export opaque .. + ) { + flowParseDeclare(); + } else if ( + match(tt.star) || // declare export * from '' + match(tt.braceL) || // declare export {} ... + isContextual(ContextualKeyword._interface) || // declare export interface ... + isContextual(ContextualKeyword._type) || // declare export type ... + isContextual(ContextualKeyword._opaque) // declare export opaque type ... + ) { + parseExport(); + } else { + unexpected(); + } +} + +function flowParseDeclareModuleExports() { + expectContextual(ContextualKeyword._module); + expect(tt.dot); + expectContextual(ContextualKeyword._exports); + flowParseTypeAnnotation(); + semicolon(); +} + +function flowParseDeclareTypeAlias() { + next(); + flowParseTypeAlias(); +} + +function flowParseDeclareOpaqueType() { + next(); + flowParseOpaqueType(true); +} + +function flowParseDeclareInterface() { + next(); + flowParseInterfaceish(); +} + +// Interfaces + +function flowParseInterfaceish(isClass = false) { + flowParseRestrictedIdentifier(); + + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + if (eat(tt._extends)) { + do { + flowParseInterfaceExtends(); + } while (!isClass && eat(tt.comma)); + } + + if (isContextual(ContextualKeyword._mixins)) { + next(); + do { + flowParseInterfaceExtends(); + } while (eat(tt.comma)); + } + + if (isContextual(ContextualKeyword._implements)) { + next(); + do { + flowParseInterfaceExtends(); + } while (eat(tt.comma)); + } + + flowParseObjectType(isClass, false, isClass); +} + +function flowParseInterfaceExtends() { + flowParseQualifiedTypeIdentifier(false); + if (match(tt.lessThan)) { + flowParseTypeParameterInstantiation(); + } +} + +function flowParseInterface() { + flowParseInterfaceish(); +} + +function flowParseRestrictedIdentifier() { + parseIdentifier(); +} + +function flowParseTypeAlias() { + flowParseRestrictedIdentifier(); + + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + flowParseTypeInitialiser(tt.eq); + semicolon(); +} + +function flowParseOpaqueType(declare) { + expectContextual(ContextualKeyword._type); + flowParseRestrictedIdentifier(); + + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + // Parse the supertype + if (match(tt.colon)) { + flowParseTypeInitialiser(tt.colon); + } + + if (!declare) { + flowParseTypeInitialiser(tt.eq); + } + semicolon(); +} + +function flowParseTypeParameter() { + flowParseVariance(); + flowParseTypeAnnotatableIdentifier(); + + if (eat(tt.eq)) { + flowParseType(); + } +} + +export function flowParseTypeParameterDeclaration() { + const oldIsType = 
pushTypeContext(0); + // istanbul ignore else: this condition is already checked at all call sites + if (match(tt.lessThan) || match(tt.typeParameterStart)) { + next(); + } else { + unexpected(); + } + + do { + flowParseTypeParameter(); + if (!match(tt.greaterThan)) { + expect(tt.comma); + } + } while (!match(tt.greaterThan) && !state.error); + expect(tt.greaterThan); + popTypeContext(oldIsType); +} + +function flowParseTypeParameterInstantiation() { + const oldIsType = pushTypeContext(0); + expect(tt.lessThan); + while (!match(tt.greaterThan) && !state.error) { + flowParseType(); + if (!match(tt.greaterThan)) { + expect(tt.comma); + } + } + expect(tt.greaterThan); + popTypeContext(oldIsType); +} + +function flowParseInterfaceType() { + expectContextual(ContextualKeyword._interface); + if (eat(tt._extends)) { + do { + flowParseInterfaceExtends(); + } while (eat(tt.comma)); + } + flowParseObjectType(false, false, false); +} + +function flowParseObjectPropertyKey() { + if (match(tt.num) || match(tt.string)) { + parseExprAtom(); + } else { + parseIdentifier(); + } +} + +function flowParseObjectTypeIndexer() { + // Note: bracketL has already been consumed + if (lookaheadType() === tt.colon) { + flowParseObjectPropertyKey(); + flowParseTypeInitialiser(); + } else { + flowParseType(); + } + expect(tt.bracketR); + flowParseTypeInitialiser(); +} + +function flowParseObjectTypeInternalSlot() { + // Note: both bracketL have already been consumed + flowParseObjectPropertyKey(); + expect(tt.bracketR); + expect(tt.bracketR); + if (match(tt.lessThan) || match(tt.parenL)) { + flowParseObjectTypeMethodish(); + } else { + eat(tt.question); + flowParseTypeInitialiser(); + } +} + +function flowParseObjectTypeMethodish() { + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + expect(tt.parenL); + while (!match(tt.parenR) && !match(tt.ellipsis) && !state.error) { + flowParseFunctionTypeParam(); + if (!match(tt.parenR)) { + expect(tt.comma); + } + } + + if (eat(tt.ellipsis)) { + flowParseFunctionTypeParam(); + } + expect(tt.parenR); + flowParseTypeInitialiser(); +} + +function flowParseObjectTypeCallProperty() { + flowParseObjectTypeMethodish(); +} + +function flowParseObjectType(allowStatic, allowExact, allowProto) { + let endDelim; + if (allowExact && match(tt.braceBarL)) { + expect(tt.braceBarL); + endDelim = tt.braceBarR; + } else { + expect(tt.braceL); + endDelim = tt.braceR; + } + + while (!match(endDelim) && !state.error) { + if (allowProto && isContextual(ContextualKeyword._proto)) { + const lookahead = lookaheadType(); + if (lookahead !== tt.colon && lookahead !== tt.question) { + next(); + allowStatic = false; + } + } + if (allowStatic && isContextual(ContextualKeyword._static)) { + const lookahead = lookaheadType(); + if (lookahead !== tt.colon && lookahead !== tt.question) { + next(); + } + } + + flowParseVariance(); + + if (eat(tt.bracketL)) { + if (eat(tt.bracketL)) { + flowParseObjectTypeInternalSlot(); + } else { + flowParseObjectTypeIndexer(); + } + } else if (match(tt.parenL) || match(tt.lessThan)) { + flowParseObjectTypeCallProperty(); + } else { + if (isContextual(ContextualKeyword._get) || isContextual(ContextualKeyword._set)) { + const lookahead = lookaheadType(); + if (lookahead === tt.name || lookahead === tt.string || lookahead === tt.num) { + next(); + } + } + + flowParseObjectTypeProperty(); + } + + flowObjectTypeSemicolon(); + } + + expect(endDelim); +} + +function flowParseObjectTypeProperty() { + if (match(tt.ellipsis)) { + expect(tt.ellipsis); + if 
(!eat(tt.comma)) { + eat(tt.semi); + } + // Explicit inexact object syntax. + if (match(tt.braceR)) { + return; + } + flowParseType(); + } else { + flowParseObjectPropertyKey(); + if (match(tt.lessThan) || match(tt.parenL)) { + // This is a method property + flowParseObjectTypeMethodish(); + } else { + eat(tt.question); + flowParseTypeInitialiser(); + } + } +} + +function flowObjectTypeSemicolon() { + if (!eat(tt.semi) && !eat(tt.comma) && !match(tt.braceR) && !match(tt.braceBarR)) { + unexpected(); + } +} + +function flowParseQualifiedTypeIdentifier(initialIdAlreadyParsed) { + if (!initialIdAlreadyParsed) { + parseIdentifier(); + } + while (eat(tt.dot)) { + parseIdentifier(); + } +} + +function flowParseGenericType() { + flowParseQualifiedTypeIdentifier(true); + if (match(tt.lessThan)) { + flowParseTypeParameterInstantiation(); + } +} + +function flowParseTypeofType() { + expect(tt._typeof); + flowParsePrimaryType(); +} + +function flowParseTupleType() { + expect(tt.bracketL); + // We allow trailing commas + while (state.pos < input.length && !match(tt.bracketR)) { + flowParseType(); + if (match(tt.bracketR)) { + break; + } + expect(tt.comma); + } + expect(tt.bracketR); +} + +function flowParseFunctionTypeParam() { + const lookahead = lookaheadType(); + if (lookahead === tt.colon || lookahead === tt.question) { + parseIdentifier(); + eat(tt.question); + flowParseTypeInitialiser(); + } else { + flowParseType(); + } +} + +function flowParseFunctionTypeParams() { + while (!match(tt.parenR) && !match(tt.ellipsis) && !state.error) { + flowParseFunctionTypeParam(); + if (!match(tt.parenR)) { + expect(tt.comma); + } + } + if (eat(tt.ellipsis)) { + flowParseFunctionTypeParam(); + } +} + +// The parsing of types roughly parallels the parsing of expressions, and +// primary types are kind of like primary expressions...they're the +// primitives with which other types are constructed. 
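// --- Editor's illustrative aside; not part of the vendored sucrase sources. ---
// A standalone sketch (run as its own script) of the Flow primary-type forms
// that flowParsePrimaryType below accepts, assuming sucrase's public
// transform(code, {transforms}) API; the sample names are made up, and the
// "flow" transform simply erases these type-only statements from the output.
import {transform} from "sucrase";

const flowSamples = [
  "type Ref = Foo.Bar<string>;",        // tt.name      -> generic / qualified reference
  "type Exact = {| x: number |};",      // tt.braceBarL -> exact object type
  "type Tup = [number, string];",       // tt.bracketL  -> tuple type
  "type Fn = (x: number) => string;",   // tt.parenL    -> grouped / function type
  "type Lit = 'a' | 42 | null | void;", // literal and keyword types
];

for (const src of flowSamples) {
  console.log(transform(src, {transforms: ["flow"]}).code);
}
// --- End of aside. ---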
+function flowParsePrimaryType() { + let isGroupedType = false; + const oldNoAnonFunctionType = state.noAnonFunctionType; + + switch (state.type) { + case tt.name: { + if (isContextual(ContextualKeyword._interface)) { + flowParseInterfaceType(); + return; + } + parseIdentifier(); + flowParseGenericType(); + return; + } + + case tt.braceL: + flowParseObjectType(false, false, false); + return; + + case tt.braceBarL: + flowParseObjectType(false, true, false); + return; + + case tt.bracketL: + flowParseTupleType(); + return; + + case tt.lessThan: + flowParseTypeParameterDeclaration(); + expect(tt.parenL); + flowParseFunctionTypeParams(); + expect(tt.parenR); + expect(tt.arrow); + flowParseType(); + return; + + case tt.parenL: + next(); + + // Check to see if this is actually a grouped type + if (!match(tt.parenR) && !match(tt.ellipsis)) { + if (match(tt.name)) { + const token = lookaheadType(); + isGroupedType = token !== tt.question && token !== tt.colon; + } else { + isGroupedType = true; + } + } + + if (isGroupedType) { + state.noAnonFunctionType = false; + flowParseType(); + state.noAnonFunctionType = oldNoAnonFunctionType; + + // A `,` or a `) =>` means this is an anonymous function type + if ( + state.noAnonFunctionType || + !(match(tt.comma) || (match(tt.parenR) && lookaheadType() === tt.arrow)) + ) { + expect(tt.parenR); + return; + } else { + // Eat a comma if there is one + eat(tt.comma); + } + } + + flowParseFunctionTypeParams(); + + expect(tt.parenR); + expect(tt.arrow); + flowParseType(); + return; + + case tt.minus: + next(); + parseLiteral(); + return; + + case tt.string: + case tt.num: + case tt._true: + case tt._false: + case tt._null: + case tt._this: + case tt._void: + case tt.star: + next(); + return; + + default: + if (state.type === tt._typeof) { + flowParseTypeofType(); + return; + } else if (state.type & TokenType.IS_KEYWORD) { + next(); + state.tokens[state.tokens.length - 1].type = tt.name; + return; + } + } + + unexpected(); +} + +function flowParsePostfixType() { + flowParsePrimaryType(); + while (!canInsertSemicolon() && match(tt.bracketL)) { + expect(tt.bracketL); + expect(tt.bracketR); + } +} + +function flowParsePrefixType() { + if (eat(tt.question)) { + flowParsePrefixType(); + } else { + flowParsePostfixType(); + } +} + +function flowParseAnonFunctionWithoutParens() { + flowParsePrefixType(); + if (!state.noAnonFunctionType && eat(tt.arrow)) { + flowParseType(); + } +} + +function flowParseIntersectionType() { + eat(tt.bitwiseAND); + flowParseAnonFunctionWithoutParens(); + while (eat(tt.bitwiseAND)) { + flowParseAnonFunctionWithoutParens(); + } +} + +function flowParseUnionType() { + eat(tt.bitwiseOR); + flowParseIntersectionType(); + while (eat(tt.bitwiseOR)) { + flowParseIntersectionType(); + } +} + +function flowParseType() { + flowParseUnionType(); +} + +export function flowParseTypeAnnotation() { + flowParseTypeInitialiser(); +} + +function flowParseTypeAnnotatableIdentifier() { + parseIdentifier(); + if (match(tt.colon)) { + flowParseTypeAnnotation(); + } +} + +export function flowParseVariance() { + if (match(tt.plus) || match(tt.minus)) { + next(); + } +} + +// ================================== +// Overrides +// ================================== + +export function flowParseFunctionBodyAndFinish(funcContextId) { + // For arrow functions, `parseArrow` handles the return type itself. 
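// (Editor's note: e.g. for `function f(x: number): string { ... }` the
// `: string` return annotation is consumed by the colon check just below,
// while for arrows such as `(x): string => x` flowParseArrow has already
// consumed it, per the comment above.)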
+ if (match(tt.colon)) { + flowParseTypeAndPredicateInitialiser(); + } + + parseFunctionBody(false, funcContextId); +} + +export function flowParseSubscript(startPos, noCalls, stopState) { + if (match(tt.questionDot) && lookaheadType() === tt.lessThan) { + if (noCalls) { + stopState.stop = true; + return; + } + next(); + flowParseTypeParameterInstantiation(); + expect(tt.parenL); + parseCallExpressionArguments(); + return; + } else if (!noCalls && match(tt.lessThan)) { + const snapshot = state.snapshot(); + flowParseTypeParameterInstantiation(); + expect(tt.parenL); + parseCallExpressionArguments(); + if (state.error) { + state.restoreFromSnapshot(snapshot); + } else { + return; + } + } + baseParseSubscript(startPos, noCalls, stopState); +} + +export function flowStartParseNewArguments() { + if (match(tt.lessThan)) { + const snapshot = state.snapshot(); + flowParseTypeParameterInstantiation(); + if (state.error) { + state.restoreFromSnapshot(snapshot); + } + } +} + +// interfaces +export function flowTryParseStatement() { + if (match(tt.name) && state.contextualKeyword === ContextualKeyword._interface) { + const oldIsType = pushTypeContext(0); + next(); + flowParseInterface(); + popTypeContext(oldIsType); + return true; + } else { + return false; + } +} + +// declares, interfaces and type aliases +export function flowParseIdentifierStatement(contextualKeyword) { + if (contextualKeyword === ContextualKeyword._declare) { + if ( + match(tt._class) || + match(tt.name) || + match(tt._function) || + match(tt._var) || + match(tt._export) + ) { + const oldIsType = pushTypeContext(1); + flowParseDeclare(); + popTypeContext(oldIsType); + } + } else if (match(tt.name)) { + if (contextualKeyword === ContextualKeyword._interface) { + const oldIsType = pushTypeContext(1); + flowParseInterface(); + popTypeContext(oldIsType); + } else if (contextualKeyword === ContextualKeyword._type) { + const oldIsType = pushTypeContext(1); + flowParseTypeAlias(); + popTypeContext(oldIsType); + } else if (contextualKeyword === ContextualKeyword._opaque) { + const oldIsType = pushTypeContext(1); + flowParseOpaqueType(false); + popTypeContext(oldIsType); + } + } + semicolon(); +} + +// export type +export function flowShouldParseExportDeclaration() { + return ( + isContextual(ContextualKeyword._type) || + isContextual(ContextualKeyword._interface) || + isContextual(ContextualKeyword._opaque) + ); +} + +export function flowShouldDisallowExportDefaultSpecifier() { + return ( + match(tt.name) && + (state.contextualKeyword === ContextualKeyword._type || + state.contextualKeyword === ContextualKeyword._interface || + state.contextualKeyword === ContextualKeyword._opaque) + ); +} + +export function flowParseExportDeclaration() { + if (isContextual(ContextualKeyword._type)) { + const oldIsType = pushTypeContext(1); + next(); + + if (match(tt.braceL)) { + // export type { foo, bar }; + parseExportSpecifiers(); + parseExportFrom(); + } else { + // export type Foo = Bar; + flowParseTypeAlias(); + } + popTypeContext(oldIsType); + } else if (isContextual(ContextualKeyword._opaque)) { + const oldIsType = pushTypeContext(1); + next(); + // export opaque type Foo = Bar; + flowParseOpaqueType(false); + popTypeContext(oldIsType); + } else if (isContextual(ContextualKeyword._interface)) { + const oldIsType = pushTypeContext(1); + next(); + flowParseInterface(); + popTypeContext(oldIsType); + } else { + parseStatement(true); + } +} + +export function flowShouldParseExportStar() { + return match(tt.star) || 
(isContextual(ContextualKeyword._type) && lookaheadType() === tt.star); +} + +export function flowParseExportStar() { + if (eatContextual(ContextualKeyword._type)) { + const oldIsType = pushTypeContext(2); + baseParseExportStar(); + popTypeContext(oldIsType); + } else { + baseParseExportStar(); + } +} + +// parse a the super class type parameters and implements +export function flowAfterParseClassSuper(hasSuper) { + if (hasSuper && match(tt.lessThan)) { + flowParseTypeParameterInstantiation(); + } + if (isContextual(ContextualKeyword._implements)) { + const oldIsType = pushTypeContext(0); + next(); + state.tokens[state.tokens.length - 1].type = tt._implements; + do { + flowParseRestrictedIdentifier(); + if (match(tt.lessThan)) { + flowParseTypeParameterInstantiation(); + } + } while (eat(tt.comma)); + popTypeContext(oldIsType); + } +} + +// parse type parameters for object method shorthand +export function flowStartParseObjPropValue() { + // method shorthand + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + if (!match(tt.parenL)) unexpected(); + } +} + +export function flowParseAssignableListItemTypes() { + const oldIsType = pushTypeContext(0); + eat(tt.question); + if (match(tt.colon)) { + flowParseTypeAnnotation(); + } + popTypeContext(oldIsType); +} + +// parse typeof and type imports +export function flowStartParseImportSpecifiers() { + if (match(tt._typeof) || isContextual(ContextualKeyword._type)) { + const lh = lookaheadTypeAndKeyword(); + if (isMaybeDefaultImport(lh) || lh.type === tt.braceL || lh.type === tt.star) { + next(); + } + } +} + +// parse import-type/typeof shorthand +export function flowParseImportSpecifier() { + const isTypeKeyword = + state.contextualKeyword === ContextualKeyword._type || state.type === tt._typeof; + if (isTypeKeyword) { + next(); + } else { + parseIdentifier(); + } + + if (isContextual(ContextualKeyword._as) && !isLookaheadContextual(ContextualKeyword._as)) { + parseIdentifier(); + if (isTypeKeyword && !match(tt.name) && !(state.type & TokenType.IS_KEYWORD)) { + // `import {type as ,` or `import {type as }` + } else { + // `import {type as foo` + parseIdentifier(); + } + } else if (isTypeKeyword && (match(tt.name) || !!(state.type & TokenType.IS_KEYWORD))) { + // `import {type foo` + parseIdentifier(); + if (eatContextual(ContextualKeyword._as)) { + parseIdentifier(); + } + } +} + +// parse function type parameters - function foo() {} +export function flowStartParseFunctionParams() { + // Originally this checked if the method is a getter/setter, but if it was, we'd crash soon + // anyway, so don't try to propagate that information. + if (match(tt.lessThan)) { + const oldIsType = pushTypeContext(0); + flowParseTypeParameterDeclaration(); + popTypeContext(oldIsType); + } +} + +// parse flow type annotations on variable declarator heads - let foo: string = bar +export function flowAfterParseVarHead() { + if (match(tt.colon)) { + flowParseTypeAnnotation(); + } +} + +// parse the return type of an async arrow function - let foo = (async (): number => {}); +export function flowStartParseAsyncArrowFromCallExpression() { + if (match(tt.colon)) { + const oldNoAnonFunctionType = state.noAnonFunctionType; + state.noAnonFunctionType = true; + flowParseTypeAnnotation(); + state.noAnonFunctionType = oldNoAnonFunctionType; + } +} + +// We need to support type parameter declarations for arrow functions. This +// is tricky. There are three situations we need to handle +// +// 1. This is either JSX or an arrow function. We'll try JSX first. 
If that +// fails, we'll try an arrow function. If that fails, we'll throw the JSX +// error. +// 2. This is an arrow function. We'll parse the type parameter declaration, +// parse the rest, make sure the rest is an arrow function, and go from +// there +// 3. This is neither. Just call the super method +export function flowParseMaybeAssign(noIn, isWithinParens) { + if (match(tt.lessThan)) { + const snapshot = state.snapshot(); + let wasArrow = baseParseMaybeAssign(noIn, isWithinParens); + if (state.error) { + state.restoreFromSnapshot(snapshot); + state.type = tt.typeParameterStart; + } else { + return wasArrow; + } + + const oldIsType = pushTypeContext(0); + flowParseTypeParameterDeclaration(); + popTypeContext(oldIsType); + wasArrow = baseParseMaybeAssign(noIn, isWithinParens); + if (wasArrow) { + return true; + } + unexpected(); + } + + return baseParseMaybeAssign(noIn, isWithinParens); +} + +// handle return types for arrow functions +export function flowParseArrow() { + if (match(tt.colon)) { + const oldIsType = pushTypeContext(0); + const snapshot = state.snapshot(); + + const oldNoAnonFunctionType = state.noAnonFunctionType; + state.noAnonFunctionType = true; + flowParseTypeAndPredicateInitialiser(); + state.noAnonFunctionType = oldNoAnonFunctionType; + + if (canInsertSemicolon()) unexpected(); + if (!match(tt.arrow)) unexpected(); + + if (state.error) { + state.restoreFromSnapshot(snapshot); + } + popTypeContext(oldIsType); + } + return eat(tt.arrow); +} + +export function flowParseSubscripts(startPos, noCalls = false) { + if ( + state.tokens[state.tokens.length - 1].contextualKeyword === ContextualKeyword._async && + match(tt.lessThan) + ) { + const snapshot = state.snapshot(); + const wasArrow = parseAsyncArrowWithTypeParameters(); + if (wasArrow && !state.error) { + return; + } + state.restoreFromSnapshot(snapshot); + } + + baseParseSubscripts(startPos, noCalls); +} + +// Returns true if there was an arrow function here. +function parseAsyncArrowWithTypeParameters() { + state.scopeDepth++; + const startTokenIndex = state.tokens.length; + parseFunctionParams(); + if (!parseArrow()) { + return false; + } + parseArrowExpression(startTokenIndex); + return true; +} diff --git a/node_modules/sucrase/dist/parser/plugins/jsx/index.d.ts b/node_modules/sucrase/dist/parser/plugins/jsx/index.d.ts new file mode 100644 index 00000000..2600d0f4 --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/jsx/index.d.ts @@ -0,0 +1,2 @@ +export declare function jsxParseElement(): void; +export declare function nextJSXTagToken(): void; diff --git a/node_modules/sucrase/dist/parser/plugins/jsx/index.js b/node_modules/sucrase/dist/parser/plugins/jsx/index.js new file mode 100644 index 00000000..e239c57b --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/jsx/index.js @@ -0,0 +1,309 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + + + + + + + + + +var _index = require('../../tokenizer/index'); +var _types = require('../../tokenizer/types'); +var _base = require('../../traverser/base'); +var _expression = require('../../traverser/expression'); +var _util = require('../../traverser/util'); +var _charcodes = require('../../util/charcodes'); +var _identifier = require('../../util/identifier'); +var _typescript = require('../typescript'); + +// Reads inline JSX contents token. 
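// --- Editor's illustrative aside; not part of the vendored sucrase sources. ---
// A standalone sketch of the input the JSX tokenizer/parser below handles,
// assuming sucrase's public transform(code, {transforms}) API; the markup and
// variable names are made up. Text children, an expression container, and a
// self-closing child all flow through jsxReadToken/jsxParseElement.
const {transform: sucraseTransform} = require("sucrase");

const jsxSample = '<div className="box">{count} items <br /></div>;';

// The "jsx" transform compiles the element to React.createElement(...) calls.
console.log(sucraseTransform(jsxSample, {transforms: ["jsx"]}).code);
// --- End of aside. ---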
+function jsxReadToken() { + for (;;) { + if (_base.state.pos >= _base.input.length) { + _util.unexpected.call(void 0, "Unterminated JSX contents"); + return; + } + + const ch = _base.input.charCodeAt(_base.state.pos); + + switch (ch) { + case _charcodes.charCodes.lessThan: + case _charcodes.charCodes.leftCurlyBrace: + if (_base.state.pos === _base.state.start) { + if (ch === _charcodes.charCodes.lessThan) { + _base.state.pos++; + _index.finishToken.call(void 0, _types.TokenType.jsxTagStart); + return; + } + _index.getTokenFromCode.call(void 0, ch); + return; + } + _index.finishToken.call(void 0, _types.TokenType.jsxText); + return; + + default: + _base.state.pos++; + } + } +} + +function jsxReadString(quote) { + _base.state.pos++; + for (;;) { + if (_base.state.pos >= _base.input.length) { + _util.unexpected.call(void 0, "Unterminated string constant"); + return; + } + + const ch = _base.input.charCodeAt(_base.state.pos); + if (ch === quote) { + _base.state.pos++; + break; + } + _base.state.pos++; + } + _index.finishToken.call(void 0, _types.TokenType.string); +} + +// Read a JSX identifier (valid tag or attribute name). +// +// Optimized version since JSX identifiers can't contain +// escape characters and so can be read as single slice. +// Also assumes that first character was already checked +// by isIdentifierStart in readToken. + +function jsxReadWord() { + let ch; + do { + if (_base.state.pos > _base.input.length) { + _util.unexpected.call(void 0, "Unexpectedly reached the end of input."); + return; + } + ch = _base.input.charCodeAt(++_base.state.pos); + } while (_identifier.IS_IDENTIFIER_CHAR[ch] || ch === _charcodes.charCodes.dash); + _index.finishToken.call(void 0, _types.TokenType.jsxName); +} + +// Parse next token as JSX identifier +function jsxParseIdentifier() { + nextJSXTagToken(); +} + +// Parse namespaced identifier. +function jsxParseNamespacedName(identifierRole) { + jsxParseIdentifier(); + if (!_index.eat.call(void 0, _types.TokenType.colon)) { + // Plain identifier, so this is an access. + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = identifierRole; + return; + } + // Process the second half of the namespaced name. + jsxParseIdentifier(); +} + +// Parses element name in any form - namespaced, member +// or single identifier. +function jsxParseElementName() { + jsxParseNamespacedName(_index.IdentifierRole.Access); + while (_index.match.call(void 0, _types.TokenType.dot)) { + nextJSXTagToken(); + jsxParseIdentifier(); + } +} + +// Parses any type of JSX attribute value. +function jsxParseAttributeValue() { + switch (_base.state.type) { + case _types.TokenType.braceL: + jsxParseExpressionContainer(); + nextJSXTagToken(); + return; + + case _types.TokenType.jsxTagStart: + jsxParseElement(); + nextJSXTagToken(); + return; + + case _types.TokenType.string: + nextJSXTagToken(); + return; + + default: + _util.unexpected.call(void 0, "JSX value should be either an expression or a quoted JSX text"); + } +} + +function jsxParseEmptyExpression() { + // Do nothing. +} + +// Parse JSX spread child +// Does not parse the last token. +function jsxParseSpreadChild() { + _util.expect.call(void 0, _types.TokenType.braceL); + _util.expect.call(void 0, _types.TokenType.ellipsis); + _expression.parseExpression.call(void 0, ); +} + +// Parses JSX expression enclosed into curly brackets. +// Does not parse the last token. 
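// (Editor's note: e.g. the `{count}` in `<div>{count}</div>`; the closing `}`
// is left for the caller, matching the "does not parse the last token" note.)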
+function jsxParseExpressionContainer() { + _index.next.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.braceR)) { + jsxParseEmptyExpression(); + } else { + _expression.parseExpression.call(void 0, ); + } +} + +// Parses following JSX attribute name-value pair. +function jsxParseAttribute() { + if (_index.eat.call(void 0, _types.TokenType.braceL)) { + _util.expect.call(void 0, _types.TokenType.ellipsis); + _expression.parseMaybeAssign.call(void 0, ); + // } + nextJSXTagToken(); + return; + } + jsxParseNamespacedName(_index.IdentifierRole.ObjectKey); + if (_index.match.call(void 0, _types.TokenType.eq)) { + nextJSXTagToken(); + jsxParseAttributeValue(); + } +} + +// Parses JSX opening tag starting after "<". +// Returns true if the tag was self-closing. +// Does not parse the last token. +function jsxParseOpeningElement() { + if (_index.match.call(void 0, _types.TokenType.jsxTagEnd)) { + // This is an open-fragment. + return false; + } + jsxParseElementName(); + if (_base.isTypeScriptEnabled) { + _typescript.tsTryParseJSXTypeArgument.call(void 0, ); + } + while (!_index.match.call(void 0, _types.TokenType.slash) && !_index.match.call(void 0, _types.TokenType.jsxTagEnd) && !_base.state.error) { + jsxParseAttribute(); + } + const isSelfClosing = _index.match.call(void 0, _types.TokenType.slash); + if (isSelfClosing) { + // / + nextJSXTagToken(); + } + return isSelfClosing; +} + +// Parses JSX closing tag starting after "= input.length) { + unexpected("Unterminated JSX contents"); + return; + } + + const ch = input.charCodeAt(state.pos); + + switch (ch) { + case charCodes.lessThan: + case charCodes.leftCurlyBrace: + if (state.pos === state.start) { + if (ch === charCodes.lessThan) { + state.pos++; + finishToken(tt.jsxTagStart); + return; + } + getTokenFromCode(ch); + return; + } + finishToken(tt.jsxText); + return; + + default: + state.pos++; + } + } +} + +function jsxReadString(quote) { + state.pos++; + for (;;) { + if (state.pos >= input.length) { + unexpected("Unterminated string constant"); + return; + } + + const ch = input.charCodeAt(state.pos); + if (ch === quote) { + state.pos++; + break; + } + state.pos++; + } + finishToken(tt.string); +} + +// Read a JSX identifier (valid tag or attribute name). +// +// Optimized version since JSX identifiers can't contain +// escape characters and so can be read as single slice. +// Also assumes that first character was already checked +// by isIdentifierStart in readToken. + +function jsxReadWord() { + let ch; + do { + if (state.pos > input.length) { + unexpected("Unexpectedly reached the end of input."); + return; + } + ch = input.charCodeAt(++state.pos); + } while (IS_IDENTIFIER_CHAR[ch] || ch === charCodes.dash); + finishToken(tt.jsxName); +} + +// Parse next token as JSX identifier +function jsxParseIdentifier() { + nextJSXTagToken(); +} + +// Parse namespaced identifier. +function jsxParseNamespacedName(identifierRole) { + jsxParseIdentifier(); + if (!eat(tt.colon)) { + // Plain identifier, so this is an access. + state.tokens[state.tokens.length - 1].identifierRole = identifierRole; + return; + } + // Process the second half of the namespaced name. + jsxParseIdentifier(); +} + +// Parses element name in any form - namespaced, member +// or single identifier. +function jsxParseElementName() { + jsxParseNamespacedName(IdentifierRole.Access); + while (match(tt.dot)) { + nextJSXTagToken(); + jsxParseIdentifier(); + } +} + +// Parses any type of JSX attribute value. 
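// (Editor's note: per the switch below, an attribute value may be an
// expression container (`id={x}`), a nested JSX element (`icon=<Star />`),
// or a quoted string (`id="x"`).)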
+function jsxParseAttributeValue() { + switch (state.type) { + case tt.braceL: + jsxParseExpressionContainer(); + nextJSXTagToken(); + return; + + case tt.jsxTagStart: + jsxParseElement(); + nextJSXTagToken(); + return; + + case tt.string: + nextJSXTagToken(); + return; + + default: + unexpected("JSX value should be either an expression or a quoted JSX text"); + } +} + +function jsxParseEmptyExpression() { + // Do nothing. +} + +// Parse JSX spread child +// Does not parse the last token. +function jsxParseSpreadChild() { + expect(tt.braceL); + expect(tt.ellipsis); + parseExpression(); +} + +// Parses JSX expression enclosed into curly brackets. +// Does not parse the last token. +function jsxParseExpressionContainer() { + next(); + if (match(tt.braceR)) { + jsxParseEmptyExpression(); + } else { + parseExpression(); + } +} + +// Parses following JSX attribute name-value pair. +function jsxParseAttribute() { + if (eat(tt.braceL)) { + expect(tt.ellipsis); + parseMaybeAssign(); + // } + nextJSXTagToken(); + return; + } + jsxParseNamespacedName(IdentifierRole.ObjectKey); + if (match(tt.eq)) { + nextJSXTagToken(); + jsxParseAttributeValue(); + } +} + +// Parses JSX opening tag starting after "<". +// Returns true if the tag was self-closing. +// Does not parse the last token. +function jsxParseOpeningElement() { + if (match(tt.jsxTagEnd)) { + // This is an open-fragment. + return false; + } + jsxParseElementName(); + if (isTypeScriptEnabled) { + tsTryParseJSXTypeArgument(); + } + while (!match(tt.slash) && !match(tt.jsxTagEnd) && !state.error) { + jsxParseAttribute(); + } + const isSelfClosing = match(tt.slash); + if (isSelfClosing) { + // / + nextJSXTagToken(); + } + return isSelfClosing; +} + +// Parses JSX closing tag starting after "", + nbsp: "\u00A0", + iexcl: "\u00A1", + cent: "\u00A2", + pound: "\u00A3", + curren: "\u00A4", + yen: "\u00A5", + brvbar: "\u00A6", + sect: "\u00A7", + uml: "\u00A8", + copy: "\u00A9", + ordf: "\u00AA", + laquo: "\u00AB", + not: "\u00AC", + shy: "\u00AD", + reg: "\u00AE", + macr: "\u00AF", + deg: "\u00B0", + plusmn: "\u00B1", + sup2: "\u00B2", + sup3: "\u00B3", + acute: "\u00B4", + micro: "\u00B5", + para: "\u00B6", + middot: "\u00B7", + cedil: "\u00B8", + sup1: "\u00B9", + ordm: "\u00BA", + raquo: "\u00BB", + frac14: "\u00BC", + frac12: "\u00BD", + frac34: "\u00BE", + iquest: "\u00BF", + Agrave: "\u00C0", + Aacute: "\u00C1", + Acirc: "\u00C2", + Atilde: "\u00C3", + Auml: "\u00C4", + Aring: "\u00C5", + AElig: "\u00C6", + Ccedil: "\u00C7", + Egrave: "\u00C8", + Eacute: "\u00C9", + Ecirc: "\u00CA", + Euml: "\u00CB", + Igrave: "\u00CC", + Iacute: "\u00CD", + Icirc: "\u00CE", + Iuml: "\u00CF", + ETH: "\u00D0", + Ntilde: "\u00D1", + Ograve: "\u00D2", + Oacute: "\u00D3", + Ocirc: "\u00D4", + Otilde: "\u00D5", + Ouml: "\u00D6", + times: "\u00D7", + Oslash: "\u00D8", + Ugrave: "\u00D9", + Uacute: "\u00DA", + Ucirc: "\u00DB", + Uuml: "\u00DC", + Yacute: "\u00DD", + THORN: "\u00DE", + szlig: "\u00DF", + agrave: "\u00E0", + aacute: "\u00E1", + acirc: "\u00E2", + atilde: "\u00E3", + auml: "\u00E4", + aring: "\u00E5", + aelig: "\u00E6", + ccedil: "\u00E7", + egrave: "\u00E8", + eacute: "\u00E9", + ecirc: "\u00EA", + euml: "\u00EB", + igrave: "\u00EC", + iacute: "\u00ED", + icirc: "\u00EE", + iuml: "\u00EF", + eth: "\u00F0", + ntilde: "\u00F1", + ograve: "\u00F2", + oacute: "\u00F3", + ocirc: "\u00F4", + otilde: "\u00F5", + ouml: "\u00F6", + divide: "\u00F7", + oslash: "\u00F8", + ugrave: "\u00F9", + uacute: "\u00FA", + ucirc: "\u00FB", + uuml: "\u00FC", + yacute: "\u00FD", 
+ thorn: "\u00FE", + yuml: "\u00FF", + OElig: "\u0152", + oelig: "\u0153", + Scaron: "\u0160", + scaron: "\u0161", + Yuml: "\u0178", + fnof: "\u0192", + circ: "\u02C6", + tilde: "\u02DC", + Alpha: "\u0391", + Beta: "\u0392", + Gamma: "\u0393", + Delta: "\u0394", + Epsilon: "\u0395", + Zeta: "\u0396", + Eta: "\u0397", + Theta: "\u0398", + Iota: "\u0399", + Kappa: "\u039A", + Lambda: "\u039B", + Mu: "\u039C", + Nu: "\u039D", + Xi: "\u039E", + Omicron: "\u039F", + Pi: "\u03A0", + Rho: "\u03A1", + Sigma: "\u03A3", + Tau: "\u03A4", + Upsilon: "\u03A5", + Phi: "\u03A6", + Chi: "\u03A7", + Psi: "\u03A8", + Omega: "\u03A9", + alpha: "\u03B1", + beta: "\u03B2", + gamma: "\u03B3", + delta: "\u03B4", + epsilon: "\u03B5", + zeta: "\u03B6", + eta: "\u03B7", + theta: "\u03B8", + iota: "\u03B9", + kappa: "\u03BA", + lambda: "\u03BB", + mu: "\u03BC", + nu: "\u03BD", + xi: "\u03BE", + omicron: "\u03BF", + pi: "\u03C0", + rho: "\u03C1", + sigmaf: "\u03C2", + sigma: "\u03C3", + tau: "\u03C4", + upsilon: "\u03C5", + phi: "\u03C6", + chi: "\u03C7", + psi: "\u03C8", + omega: "\u03C9", + thetasym: "\u03D1", + upsih: "\u03D2", + piv: "\u03D6", + ensp: "\u2002", + emsp: "\u2003", + thinsp: "\u2009", + zwnj: "\u200C", + zwj: "\u200D", + lrm: "\u200E", + rlm: "\u200F", + ndash: "\u2013", + mdash: "\u2014", + lsquo: "\u2018", + rsquo: "\u2019", + sbquo: "\u201A", + ldquo: "\u201C", + rdquo: "\u201D", + bdquo: "\u201E", + dagger: "\u2020", + Dagger: "\u2021", + bull: "\u2022", + hellip: "\u2026", + permil: "\u2030", + prime: "\u2032", + Prime: "\u2033", + lsaquo: "\u2039", + rsaquo: "\u203A", + oline: "\u203E", + frasl: "\u2044", + euro: "\u20AC", + image: "\u2111", + weierp: "\u2118", + real: "\u211C", + trade: "\u2122", + alefsym: "\u2135", + larr: "\u2190", + uarr: "\u2191", + rarr: "\u2192", + darr: "\u2193", + harr: "\u2194", + crarr: "\u21B5", + lArr: "\u21D0", + uArr: "\u21D1", + rArr: "\u21D2", + dArr: "\u21D3", + hArr: "\u21D4", + forall: "\u2200", + part: "\u2202", + exist: "\u2203", + empty: "\u2205", + nabla: "\u2207", + isin: "\u2208", + notin: "\u2209", + ni: "\u220B", + prod: "\u220F", + sum: "\u2211", + minus: "\u2212", + lowast: "\u2217", + radic: "\u221A", + prop: "\u221D", + infin: "\u221E", + ang: "\u2220", + and: "\u2227", + or: "\u2228", + cap: "\u2229", + cup: "\u222A", + int: "\u222B", + there4: "\u2234", + sim: "\u223C", + cong: "\u2245", + asymp: "\u2248", + ne: "\u2260", + equiv: "\u2261", + le: "\u2264", + ge: "\u2265", + sub: "\u2282", + sup: "\u2283", + nsub: "\u2284", + sube: "\u2286", + supe: "\u2287", + oplus: "\u2295", + otimes: "\u2297", + perp: "\u22A5", + sdot: "\u22C5", + lceil: "\u2308", + rceil: "\u2309", + lfloor: "\u230A", + rfloor: "\u230B", + lang: "\u2329", + rang: "\u232A", + loz: "\u25CA", + spades: "\u2660", + clubs: "\u2663", + hearts: "\u2665", + diams: "\u2666", +}; +exports. 
default = entities; diff --git a/node_modules/sucrase/dist/parser/plugins/jsx/xhtml.mjs b/node_modules/sucrase/dist/parser/plugins/jsx/xhtml.mjs new file mode 100644 index 00000000..b1a5e725 --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/jsx/xhtml.mjs @@ -0,0 +1,256 @@ +const entities = { + quot: "\u0022", + amp: "&", + apos: "\u0027", + lt: "<", + gt: ">", + nbsp: "\u00A0", + iexcl: "\u00A1", + cent: "\u00A2", + pound: "\u00A3", + curren: "\u00A4", + yen: "\u00A5", + brvbar: "\u00A6", + sect: "\u00A7", + uml: "\u00A8", + copy: "\u00A9", + ordf: "\u00AA", + laquo: "\u00AB", + not: "\u00AC", + shy: "\u00AD", + reg: "\u00AE", + macr: "\u00AF", + deg: "\u00B0", + plusmn: "\u00B1", + sup2: "\u00B2", + sup3: "\u00B3", + acute: "\u00B4", + micro: "\u00B5", + para: "\u00B6", + middot: "\u00B7", + cedil: "\u00B8", + sup1: "\u00B9", + ordm: "\u00BA", + raquo: "\u00BB", + frac14: "\u00BC", + frac12: "\u00BD", + frac34: "\u00BE", + iquest: "\u00BF", + Agrave: "\u00C0", + Aacute: "\u00C1", + Acirc: "\u00C2", + Atilde: "\u00C3", + Auml: "\u00C4", + Aring: "\u00C5", + AElig: "\u00C6", + Ccedil: "\u00C7", + Egrave: "\u00C8", + Eacute: "\u00C9", + Ecirc: "\u00CA", + Euml: "\u00CB", + Igrave: "\u00CC", + Iacute: "\u00CD", + Icirc: "\u00CE", + Iuml: "\u00CF", + ETH: "\u00D0", + Ntilde: "\u00D1", + Ograve: "\u00D2", + Oacute: "\u00D3", + Ocirc: "\u00D4", + Otilde: "\u00D5", + Ouml: "\u00D6", + times: "\u00D7", + Oslash: "\u00D8", + Ugrave: "\u00D9", + Uacute: "\u00DA", + Ucirc: "\u00DB", + Uuml: "\u00DC", + Yacute: "\u00DD", + THORN: "\u00DE", + szlig: "\u00DF", + agrave: "\u00E0", + aacute: "\u00E1", + acirc: "\u00E2", + atilde: "\u00E3", + auml: "\u00E4", + aring: "\u00E5", + aelig: "\u00E6", + ccedil: "\u00E7", + egrave: "\u00E8", + eacute: "\u00E9", + ecirc: "\u00EA", + euml: "\u00EB", + igrave: "\u00EC", + iacute: "\u00ED", + icirc: "\u00EE", + iuml: "\u00EF", + eth: "\u00F0", + ntilde: "\u00F1", + ograve: "\u00F2", + oacute: "\u00F3", + ocirc: "\u00F4", + otilde: "\u00F5", + ouml: "\u00F6", + divide: "\u00F7", + oslash: "\u00F8", + ugrave: "\u00F9", + uacute: "\u00FA", + ucirc: "\u00FB", + uuml: "\u00FC", + yacute: "\u00FD", + thorn: "\u00FE", + yuml: "\u00FF", + OElig: "\u0152", + oelig: "\u0153", + Scaron: "\u0160", + scaron: "\u0161", + Yuml: "\u0178", + fnof: "\u0192", + circ: "\u02C6", + tilde: "\u02DC", + Alpha: "\u0391", + Beta: "\u0392", + Gamma: "\u0393", + Delta: "\u0394", + Epsilon: "\u0395", + Zeta: "\u0396", + Eta: "\u0397", + Theta: "\u0398", + Iota: "\u0399", + Kappa: "\u039A", + Lambda: "\u039B", + Mu: "\u039C", + Nu: "\u039D", + Xi: "\u039E", + Omicron: "\u039F", + Pi: "\u03A0", + Rho: "\u03A1", + Sigma: "\u03A3", + Tau: "\u03A4", + Upsilon: "\u03A5", + Phi: "\u03A6", + Chi: "\u03A7", + Psi: "\u03A8", + Omega: "\u03A9", + alpha: "\u03B1", + beta: "\u03B2", + gamma: "\u03B3", + delta: "\u03B4", + epsilon: "\u03B5", + zeta: "\u03B6", + eta: "\u03B7", + theta: "\u03B8", + iota: "\u03B9", + kappa: "\u03BA", + lambda: "\u03BB", + mu: "\u03BC", + nu: "\u03BD", + xi: "\u03BE", + omicron: "\u03BF", + pi: "\u03C0", + rho: "\u03C1", + sigmaf: "\u03C2", + sigma: "\u03C3", + tau: "\u03C4", + upsilon: "\u03C5", + phi: "\u03C6", + chi: "\u03C7", + psi: "\u03C8", + omega: "\u03C9", + thetasym: "\u03D1", + upsih: "\u03D2", + piv: "\u03D6", + ensp: "\u2002", + emsp: "\u2003", + thinsp: "\u2009", + zwnj: "\u200C", + zwj: "\u200D", + lrm: "\u200E", + rlm: "\u200F", + ndash: "\u2013", + mdash: "\u2014", + lsquo: "\u2018", + rsquo: "\u2019", + sbquo: "\u201A", + ldquo: "\u201C", + rdquo: 
"\u201D", + bdquo: "\u201E", + dagger: "\u2020", + Dagger: "\u2021", + bull: "\u2022", + hellip: "\u2026", + permil: "\u2030", + prime: "\u2032", + Prime: "\u2033", + lsaquo: "\u2039", + rsaquo: "\u203A", + oline: "\u203E", + frasl: "\u2044", + euro: "\u20AC", + image: "\u2111", + weierp: "\u2118", + real: "\u211C", + trade: "\u2122", + alefsym: "\u2135", + larr: "\u2190", + uarr: "\u2191", + rarr: "\u2192", + darr: "\u2193", + harr: "\u2194", + crarr: "\u21B5", + lArr: "\u21D0", + uArr: "\u21D1", + rArr: "\u21D2", + dArr: "\u21D3", + hArr: "\u21D4", + forall: "\u2200", + part: "\u2202", + exist: "\u2203", + empty: "\u2205", + nabla: "\u2207", + isin: "\u2208", + notin: "\u2209", + ni: "\u220B", + prod: "\u220F", + sum: "\u2211", + minus: "\u2212", + lowast: "\u2217", + radic: "\u221A", + prop: "\u221D", + infin: "\u221E", + ang: "\u2220", + and: "\u2227", + or: "\u2228", + cap: "\u2229", + cup: "\u222A", + int: "\u222B", + there4: "\u2234", + sim: "\u223C", + cong: "\u2245", + asymp: "\u2248", + ne: "\u2260", + equiv: "\u2261", + le: "\u2264", + ge: "\u2265", + sub: "\u2282", + sup: "\u2283", + nsub: "\u2284", + sube: "\u2286", + supe: "\u2287", + oplus: "\u2295", + otimes: "\u2297", + perp: "\u22A5", + sdot: "\u22C5", + lceil: "\u2308", + rceil: "\u2309", + lfloor: "\u230A", + rfloor: "\u230B", + lang: "\u2329", + rang: "\u232A", + loz: "\u25CA", + spades: "\u2660", + clubs: "\u2663", + hearts: "\u2665", + diams: "\u2666", +}; +export default entities; diff --git a/node_modules/sucrase/dist/parser/plugins/types.d.ts b/node_modules/sucrase/dist/parser/plugins/types.d.ts new file mode 100644 index 00000000..fce6440a --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/types.d.ts @@ -0,0 +1,5 @@ +/** + * Common parser code for TypeScript and Flow. + */ +export declare function typedParseConditional(noIn: boolean): void; +export declare function typedParseParenItem(): void; diff --git a/node_modules/sucrase/dist/parser/plugins/types.js b/node_modules/sucrase/dist/parser/plugins/types.js new file mode 100644 index 00000000..27f4617f --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/types.js @@ -0,0 +1,39 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _index = require('../tokenizer/index'); +var _types = require('../tokenizer/types'); +var _base = require('../traverser/base'); +var _expression = require('../traverser/expression'); +var _flow = require('./flow'); +var _typescript = require('./typescript'); + +/** + * Common parser code for TypeScript and Flow. + */ + +// An apparent conditional expression could actually be an optional parameter in an arrow function. + function typedParseConditional(noIn) { + // If we see ?:, this can't possibly be a valid conditional. typedParseParenItem will be called + // later to finish off the arrow parameter. We also need to handle bare ? tokens for optional + // parameters without type annotations, i.e. ?, and ?) . + if (_index.match.call(void 0, _types.TokenType.question)) { + const nextType = _index.lookaheadType.call(void 0, ); + if (nextType === _types.TokenType.colon || nextType === _types.TokenType.comma || nextType === _types.TokenType.parenR) { + return; + } + } + _expression.baseParseConditional.call(void 0, noIn); +} exports.typedParseConditional = typedParseConditional; + +// Note: These "type casts" are *not* valid TS expressions. +// But we parse them here and change them when completing the arrow function. 
+ function typedParseParenItem() { + if (_index.eat.call(void 0, _types.TokenType.question)) { + _base.state.tokens[_base.state.tokens.length - 1].isType = true; + } + if (_index.match.call(void 0, _types.TokenType.colon)) { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseTypeAnnotation.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowParseTypeAnnotation.call(void 0, ); + } + } +} exports.typedParseParenItem = typedParseParenItem; diff --git a/node_modules/sucrase/dist/parser/plugins/types.mjs b/node_modules/sucrase/dist/parser/plugins/types.mjs new file mode 100644 index 00000000..a4cec040 --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/types.mjs @@ -0,0 +1,39 @@ +import {eat, lookaheadType, match} from "../tokenizer/index"; +import {TokenType as tt} from "../tokenizer/types"; +import {isFlowEnabled, isTypeScriptEnabled, state} from "../traverser/base"; +import {baseParseConditional} from "../traverser/expression"; +import {flowParseTypeAnnotation} from "./flow"; +import {tsParseTypeAnnotation} from "./typescript"; + +/** + * Common parser code for TypeScript and Flow. + */ + +// An apparent conditional expression could actually be an optional parameter in an arrow function. +export function typedParseConditional(noIn) { + // If we see ?:, this can't possibly be a valid conditional. typedParseParenItem will be called + // later to finish off the arrow parameter. We also need to handle bare ? tokens for optional + // parameters without type annotations, i.e. ?, and ?) . + if (match(tt.question)) { + const nextType = lookaheadType(); + if (nextType === tt.colon || nextType === tt.comma || nextType === tt.parenR) { + return; + } + } + baseParseConditional(noIn); +} + +// Note: These "type casts" are *not* valid TS expressions. +// But we parse them here and change them when completing the arrow function. +export function typedParseParenItem() { + if (eat(tt.question)) { + state.tokens[state.tokens.length - 1].isType = true; + } + if (match(tt.colon)) { + if (isTypeScriptEnabled) { + tsParseTypeAnnotation(); + } else if (isFlowEnabled) { + flowParseTypeAnnotation(); + } + } +} diff --git a/node_modules/sucrase/dist/parser/plugins/typescript.d.ts b/node_modules/sucrase/dist/parser/plugins/typescript.d.ts new file mode 100644 index 00000000..9e6878d1 --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/typescript.d.ts @@ -0,0 +1,34 @@ +import { ContextualKeyword } from "../tokenizer/keywords"; +import { StopState } from "../traverser/expression"; +/** Parses a modifier matching one the given modifier names. 
*/ +export declare function tsParseModifier(allowedModifiers: Array): ContextualKeyword | null; +export declare function tsTryParseTypeParameters(): void; +export declare function tsTryParseTypeAnnotation(): void; +export declare function tsParseTypeAnnotation(): void; +export declare function tsParseType(): void; +export declare function tsParseNonConditionalType(): void; +export declare function tsParseTypeAssertion(): void; +export declare function tsTryParseJSXTypeArgument(): void; +export declare function tsParseImportEqualsDeclaration(): void; +export declare function tsIsDeclarationStart(): boolean; +export declare function tsParseFunctionBodyAndFinish(functionStart: number, funcContextId: number): void; +export declare function tsParseSubscript(startPos: number, noCalls: boolean, stopState: StopState): void; +export declare function tsStartParseNewArguments(): void; +export declare function tsTryParseExport(): boolean; +export declare function tsTryParseExportDefaultExpression(): boolean; +export declare function tsTryParseStatementContent(): boolean; +export declare function tsParseAccessModifier(): void; +export declare function tsTryParseClassMemberWithIsStatic(isStatic: boolean, classContextId: number): boolean; +export declare function tsParseIdentifierStatement(contextualKeyword: ContextualKeyword): void; +export declare function tsParseExportDeclaration(): void; +export declare function tsAfterParseClassSuper(hasSuper: boolean): void; +export declare function tsStartParseObjPropValue(): void; +export declare function tsStartParseFunctionParams(): void; +export declare function tsAfterParseVarHead(): void; +export declare function tsStartParseAsyncArrowFromCallExpression(): void; +export declare function tsParseMaybeAssign(noIn: boolean, isWithinParens: boolean): boolean; +export declare function tsParseMaybeAssignWithJSX(noIn: boolean, isWithinParens: boolean): boolean; +export declare function tsParseMaybeAssignWithoutJSX(noIn: boolean, isWithinParens: boolean): boolean; +export declare function tsParseArrow(): boolean; +export declare function tsParseAssignableListItemTypes(): void; +export declare function tsParseMaybeDecoratorArguments(): void; diff --git a/node_modules/sucrase/dist/parser/plugins/typescript.js b/node_modules/sucrase/dist/parser/plugins/typescript.js new file mode 100644 index 00000000..c789b77b --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/typescript.js @@ -0,0 +1,1365 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + + + + + + + +var _index = require('../tokenizer/index'); +var _keywords = require('../tokenizer/keywords'); +var _types = require('../tokenizer/types'); +var _base = require('../traverser/base'); + + + + + + + + + + + + + + + +var _expression = require('../traverser/expression'); +var _lval = require('../traverser/lval'); + + + + + + + + + + + +var _statement = require('../traverser/statement'); + + + + + + + + + + + +var _util = require('../traverser/util'); +var _jsx = require('./jsx'); + +function tsIsIdentifier() { + // TODO: actually a bit more complex in TypeScript, but shouldn't matter. + // See https://github.com/Microsoft/TypeScript/issues/15008 + return _index.match.call(void 0, _types.TokenType.name); +} + +function tsNextTokenCanFollowModifier() { + // Note: TypeScript's implementation is much more complicated because + // more things are considered modifiers there. + // This implementation only handles modifiers not handled by babylon itself. And "static". 
+ // TODO: Would be nice to avoid lookahead. Want a hasLineBreakUpNext() method... + const snapshot = _base.state.snapshot(); + + _index.next.call(void 0, ); + const canFollowModifier = + !_util.hasPrecedingLineBreak.call(void 0, ) && + !_index.match.call(void 0, _types.TokenType.parenL) && + !_index.match.call(void 0, _types.TokenType.parenR) && + !_index.match.call(void 0, _types.TokenType.colon) && + !_index.match.call(void 0, _types.TokenType.eq) && + !_index.match.call(void 0, _types.TokenType.question) && + !_index.match.call(void 0, _types.TokenType.bang); + + if (canFollowModifier) { + return true; + } else { + _base.state.restoreFromSnapshot(snapshot); + return false; + } +} + +/** Parses a modifier matching one the given modifier names. */ + function tsParseModifier( + allowedModifiers, +) { + if (!_index.match.call(void 0, _types.TokenType.name)) { + return null; + } + + const modifier = _base.state.contextualKeyword; + if (allowedModifiers.indexOf(modifier) !== -1 && tsNextTokenCanFollowModifier()) { + switch (modifier) { + case _keywords.ContextualKeyword._readonly: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._readonly; + break; + case _keywords.ContextualKeyword._abstract: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._abstract; + break; + case _keywords.ContextualKeyword._static: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._static; + break; + case _keywords.ContextualKeyword._public: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._public; + break; + case _keywords.ContextualKeyword._private: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._private; + break; + case _keywords.ContextualKeyword._protected: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._protected; + break; + default: + break; + } + return modifier; + } + return null; +} exports.tsParseModifier = tsParseModifier; + +function tsParseEntityName() { + _expression.parseIdentifier.call(void 0, ); + while (_index.eat.call(void 0, _types.TokenType.dot)) { + _expression.parseIdentifier.call(void 0, ); + } +} + +function tsParseTypeReference() { + tsParseEntityName(); + if (!_util.hasPrecedingLineBreak.call(void 0, ) && _index.match.call(void 0, _types.TokenType.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseThisTypePredicate() { + _index.next.call(void 0, ); + tsParseTypeAnnotation(); +} + +function tsParseThisTypeNode() { + _index.next.call(void 0, ); +} + +function tsParseTypeQuery() { + _util.expect.call(void 0, _types.TokenType._typeof); + if (_index.match.call(void 0, _types.TokenType._import)) { + tsParseImportType(); + } else { + tsParseEntityName(); + } +} + +function tsParseImportType() { + _util.expect.call(void 0, _types.TokenType._import); + _util.expect.call(void 0, _types.TokenType.parenL); + _util.expect.call(void 0, _types.TokenType.string); + _util.expect.call(void 0, _types.TokenType.parenR); + if (_index.eat.call(void 0, _types.TokenType.dot)) { + tsParseEntityName(); + } + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseTypeParameter() { + _expression.parseIdentifier.call(void 0, ); + if (_index.eat.call(void 0, _types.TokenType._extends)) { + tsParseType(); + } + if (_index.eat.call(void 0, _types.TokenType.eq)) { + tsParseType(); + } +} + + function tsTryParseTypeParameters() { + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + 
tsParseTypeParameters(); + } +} exports.tsTryParseTypeParameters = tsTryParseTypeParameters; + +function tsParseTypeParameters() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + if (_index.match.call(void 0, _types.TokenType.lessThan) || _index.match.call(void 0, _types.TokenType.typeParameterStart)) { + _index.next.call(void 0, ); + } else { + _util.unexpected.call(void 0, ); + } + + while (!_index.eat.call(void 0, _types.TokenType.greaterThan) && !_base.state.error) { + tsParseTypeParameter(); + _index.eat.call(void 0, _types.TokenType.comma); + } + _index.popTypeContext.call(void 0, oldIsType); +} + +// Note: In TypeScript implementation we must provide `yieldContext` and `awaitContext`, +// but here it's always false, because this is only used for types. +function tsFillSignature(returnToken) { + // Arrow fns *must* have return token (`=>`). Normal functions can omit it. + const returnTokenRequired = returnToken === _types.TokenType.arrow; + tsTryParseTypeParameters(); + _util.expect.call(void 0, _types.TokenType.parenL); + // Create a scope even though we're doing type parsing so we don't accidentally + // treat params as top-level bindings. + _base.state.scopeDepth++; + tsParseBindingListForSignature(false /* isBlockScope */); + _base.state.scopeDepth--; + if (returnTokenRequired) { + tsParseTypeOrTypePredicateAnnotation(returnToken); + } else if (_index.match.call(void 0, returnToken)) { + tsParseTypeOrTypePredicateAnnotation(returnToken); + } +} + +function tsParseBindingListForSignature(isBlockScope) { + _lval.parseBindingList.call(void 0, _types.TokenType.parenR, isBlockScope); +} + +function tsParseTypeMemberSemicolon() { + if (!_index.eat.call(void 0, _types.TokenType.comma)) { + _util.semicolon.call(void 0, ); + } +} + +var SignatureMemberKind; (function (SignatureMemberKind) { + const TSCallSignatureDeclaration = 0; SignatureMemberKind[SignatureMemberKind["TSCallSignatureDeclaration"] = TSCallSignatureDeclaration] = "TSCallSignatureDeclaration"; + const TSConstructSignatureDeclaration = TSCallSignatureDeclaration + 1; SignatureMemberKind[SignatureMemberKind["TSConstructSignatureDeclaration"] = TSConstructSignatureDeclaration] = "TSConstructSignatureDeclaration"; +})(SignatureMemberKind || (SignatureMemberKind = {})); + +function tsParseSignatureMember(kind) { + if (kind === SignatureMemberKind.TSConstructSignatureDeclaration) { + _util.expect.call(void 0, _types.TokenType._new); + } + tsFillSignature(_types.TokenType.colon); + tsParseTypeMemberSemicolon(); +} + +function tsIsUnambiguouslyIndexSignature() { + const snapshot = _base.state.snapshot(); + _index.next.call(void 0, ); // Skip '{' + const isIndexSignature = _index.eat.call(void 0, _types.TokenType.name) && _index.match.call(void 0, _types.TokenType.colon); + _base.state.restoreFromSnapshot(snapshot); + return isIndexSignature; +} + +function tsTryParseIndexSignature() { + if (!(_index.match.call(void 0, _types.TokenType.bracketL) && tsIsUnambiguouslyIndexSignature())) { + return false; + } + + const oldIsType = _index.pushTypeContext.call(void 0, 0); + + _util.expect.call(void 0, _types.TokenType.bracketL); + _expression.parseIdentifier.call(void 0, ); + tsParseTypeAnnotation(); + _util.expect.call(void 0, _types.TokenType.bracketR); + + tsTryParseTypeAnnotation(); + tsParseTypeMemberSemicolon(); + + _index.popTypeContext.call(void 0, oldIsType); + return true; +} + +function tsParsePropertyOrMethodSignature(isReadonly) { + _expression.parsePropertyName.call(void 0, -1 /* Types don't need context IDs. 
*/); + _index.eat.call(void 0, _types.TokenType.question); + + if (!isReadonly && (_index.match.call(void 0, _types.TokenType.parenL) || _index.match.call(void 0, _types.TokenType.lessThan))) { + tsFillSignature(_types.TokenType.colon); + tsParseTypeMemberSemicolon(); + } else { + tsTryParseTypeAnnotation(); + tsParseTypeMemberSemicolon(); + } +} + +function tsParseTypeMember() { + if (_index.match.call(void 0, _types.TokenType.parenL) || _index.match.call(void 0, _types.TokenType.lessThan)) { + tsParseSignatureMember(SignatureMemberKind.TSCallSignatureDeclaration); + return; + } + if (_index.match.call(void 0, _types.TokenType._new) && tsIsStartOfConstructSignature()) { + tsParseSignatureMember(SignatureMemberKind.TSConstructSignatureDeclaration); + return; + } + const readonly = !!tsParseModifier([_keywords.ContextualKeyword._readonly]); + + const found = tsTryParseIndexSignature(); + if (found) { + return; + } + tsParsePropertyOrMethodSignature(readonly); +} + +function tsIsStartOfConstructSignature() { + const lookahead = _index.lookaheadType.call(void 0, ); + return lookahead === _types.TokenType.parenL || lookahead === _types.TokenType.lessThan; +} + +function tsParseTypeLiteral() { + tsParseObjectTypeMembers(); +} + +function tsParseObjectTypeMembers() { + _util.expect.call(void 0, _types.TokenType.braceL); + while (!_index.eat.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + tsParseTypeMember(); + } +} + +function tsLookaheadIsStartOfMappedType() { + const snapshot = _base.state.snapshot(); + const isStartOfMappedType = tsIsStartOfMappedType(); + _base.state.restoreFromSnapshot(snapshot); + return isStartOfMappedType; +} + +function tsIsStartOfMappedType() { + _index.next.call(void 0, ); + if (_index.eat.call(void 0, _types.TokenType.plus) || _index.eat.call(void 0, _types.TokenType.minus)) { + return _util.isContextual.call(void 0, _keywords.ContextualKeyword._readonly); + } + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._readonly)) { + _index.next.call(void 0, ); + } + if (!_index.match.call(void 0, _types.TokenType.bracketL)) { + return false; + } + _index.next.call(void 0, ); + if (!tsIsIdentifier()) { + return false; + } + _index.next.call(void 0, ); + return _index.match.call(void 0, _types.TokenType._in); +} + +function tsParseMappedTypeParameter() { + _expression.parseIdentifier.call(void 0, ); + _util.expect.call(void 0, _types.TokenType._in); + tsParseType(); +} + +function tsParseMappedType() { + _util.expect.call(void 0, _types.TokenType.braceL); + if (_index.match.call(void 0, _types.TokenType.plus) || _index.match.call(void 0, _types.TokenType.minus)) { + _index.next.call(void 0, ); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._readonly); + } else { + _util.eatContextual.call(void 0, _keywords.ContextualKeyword._readonly); + } + _util.expect.call(void 0, _types.TokenType.bracketL); + tsParseMappedTypeParameter(); + _util.expect.call(void 0, _types.TokenType.bracketR); + if (_index.match.call(void 0, _types.TokenType.plus) || _index.match.call(void 0, _types.TokenType.minus)) { + _index.next.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.question); + } else { + _index.eat.call(void 0, _types.TokenType.question); + } + tsTryParseType(); + _util.semicolon.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.braceR); +} + +function tsParseTupleType() { + _util.expect.call(void 0, _types.TokenType.bracketL); + while (!_index.eat.call(void 0, _types.TokenType.bracketR) && !_base.state.error) { + 
tsParseTupleElementType(); + _index.eat.call(void 0, _types.TokenType.comma); + } +} + +function tsParseTupleElementType() { + // parses `...TsType[]` + if (_index.eat.call(void 0, _types.TokenType.ellipsis)) { + tsParseType(); + return; + } + // parses `TsType?` + tsParseType(); + _index.eat.call(void 0, _types.TokenType.question); +} + +function tsParseParenthesizedType() { + _util.expect.call(void 0, _types.TokenType.parenL); + tsParseType(); + _util.expect.call(void 0, _types.TokenType.parenR); +} + +var FunctionType; (function (FunctionType) { + const TSFunctionType = 0; FunctionType[FunctionType["TSFunctionType"] = TSFunctionType] = "TSFunctionType"; + const TSConstructorType = TSFunctionType + 1; FunctionType[FunctionType["TSConstructorType"] = TSConstructorType] = "TSConstructorType"; +})(FunctionType || (FunctionType = {})); + +function tsParseFunctionOrConstructorType(type) { + if (type === FunctionType.TSConstructorType) { + _util.expect.call(void 0, _types.TokenType._new); + } + tsFillSignature(_types.TokenType.arrow); +} + +function tsParseNonArrayType() { + switch (_base.state.type) { + case _types.TokenType.name: + tsParseTypeReference(); + return; + case _types.TokenType._void: + case _types.TokenType._null: + _index.next.call(void 0, ); + return; + case _types.TokenType.string: + case _types.TokenType.num: + case _types.TokenType._true: + case _types.TokenType._false: + _expression.parseLiteral.call(void 0, ); + return; + case _types.TokenType.minus: + _index.next.call(void 0, ); + _expression.parseLiteral.call(void 0, ); + return; + case _types.TokenType._this: { + tsParseThisTypeNode(); + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._is) && !_util.hasPrecedingLineBreak.call(void 0, )) { + tsParseThisTypePredicate(); + } + return; + } + case _types.TokenType._typeof: + tsParseTypeQuery(); + return; + case _types.TokenType._import: + tsParseImportType(); + return; + case _types.TokenType.braceL: + if (tsLookaheadIsStartOfMappedType()) { + tsParseMappedType(); + } else { + tsParseTypeLiteral(); + } + return; + case _types.TokenType.bracketL: + tsParseTupleType(); + return; + case _types.TokenType.parenL: + tsParseParenthesizedType(); + return; + case _types.TokenType.backQuote: + _expression.parseTemplate.call(void 0, ); + return; + default: + if (_base.state.type & _types.TokenType.IS_KEYWORD) { + _index.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType.name; + return; + } + break; + } + + _util.unexpected.call(void 0, ); +} + +function tsParseArrayTypeOrHigher() { + tsParseNonArrayType(); + while (!_util.hasPrecedingLineBreak.call(void 0, ) && _index.eat.call(void 0, _types.TokenType.bracketL)) { + if (!_index.eat.call(void 0, _types.TokenType.bracketR)) { + // If we hit ] immediately, this is an array type, otherwise it's an indexed access type. 
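// (Editor's note: e.g. `string[]` is an array type, while `Config["env"]` is
// an indexed access type; only the latter has a type between the brackets.)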
+ tsParseType(); + _util.expect.call(void 0, _types.TokenType.bracketR); + } + } +} + +function tsParseInferType() { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._infer); + _expression.parseIdentifier.call(void 0, ); +} + +function tsParseTypeOperatorOrHigher() { + if ( + _util.isContextual.call(void 0, _keywords.ContextualKeyword._keyof) || + _util.isContextual.call(void 0, _keywords.ContextualKeyword._unique) || + _util.isContextual.call(void 0, _keywords.ContextualKeyword._readonly) + ) { + _index.next.call(void 0, ); + tsParseTypeOperatorOrHigher(); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._infer)) { + tsParseInferType(); + } else { + tsParseArrayTypeOrHigher(); + } +} + +function tsParseIntersectionTypeOrHigher() { + _index.eat.call(void 0, _types.TokenType.bitwiseAND); + tsParseTypeOperatorOrHigher(); + if (_index.match.call(void 0, _types.TokenType.bitwiseAND)) { + while (_index.eat.call(void 0, _types.TokenType.bitwiseAND)) { + tsParseTypeOperatorOrHigher(); + } + } +} + +function tsParseUnionTypeOrHigher() { + _index.eat.call(void 0, _types.TokenType.bitwiseOR); + tsParseIntersectionTypeOrHigher(); + if (_index.match.call(void 0, _types.TokenType.bitwiseOR)) { + while (_index.eat.call(void 0, _types.TokenType.bitwiseOR)) { + tsParseIntersectionTypeOrHigher(); + } + } +} + +function tsIsStartOfFunctionType() { + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + return true; + } + return _index.match.call(void 0, _types.TokenType.parenL) && tsLookaheadIsUnambiguouslyStartOfFunctionType(); +} + +function tsSkipParameterStart() { + if (_index.match.call(void 0, _types.TokenType.name) || _index.match.call(void 0, _types.TokenType._this)) { + _index.next.call(void 0, ); + return true; + } + // If this is a possible array/object destructure, walk to the matching bracket/brace. + // The next token after will tell us definitively whether this is a function param. + if (_index.match.call(void 0, _types.TokenType.braceL) || _index.match.call(void 0, _types.TokenType.bracketL)) { + let depth = 1; + _index.next.call(void 0, ); + while (depth > 0 && !_base.state.error) { + if (_index.match.call(void 0, _types.TokenType.braceL) || _index.match.call(void 0, _types.TokenType.bracketL)) { + depth++; + } else if (_index.match.call(void 0, _types.TokenType.braceR) || _index.match.call(void 0, _types.TokenType.bracketR)) { + depth--; + } + _index.next.call(void 0, ); + } + return true; + } + return false; +} + +function tsLookaheadIsUnambiguouslyStartOfFunctionType() { + const snapshot = _base.state.snapshot(); + const isUnambiguouslyStartOfFunctionType = tsIsUnambiguouslyStartOfFunctionType(); + _base.state.restoreFromSnapshot(snapshot); + return isUnambiguouslyStartOfFunctionType; +} + +function tsIsUnambiguouslyStartOfFunctionType() { + _index.next.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.parenR) || _index.match.call(void 0, _types.TokenType.ellipsis)) { + // ( ) + // ( ... + return true; + } + if (tsSkipParameterStart()) { + if (_index.match.call(void 0, _types.TokenType.colon) || _index.match.call(void 0, _types.TokenType.comma) || _index.match.call(void 0, _types.TokenType.question) || _index.match.call(void 0, _types.TokenType.eq)) { + // ( xxx : + // ( xxx , + // ( xxx ? 
+ // ( xxx = + return true; + } + if (_index.match.call(void 0, _types.TokenType.parenR)) { + _index.next.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.arrow)) { + // ( xxx ) => + return true; + } + } + } + return false; +} + +function tsParseTypeOrTypePredicateAnnotation(returnToken) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _util.expect.call(void 0, returnToken); + tsParseTypePredicatePrefix(); + // Regardless of whether we found an "is" token, there's now just a regular type in front of + // us. + tsParseType(); + _index.popTypeContext.call(void 0, oldIsType); +} + +function tsTryParseTypeOrTypePredicateAnnotation() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + tsParseTypeOrTypePredicateAnnotation(_types.TokenType.colon); + } +} + + function tsTryParseTypeAnnotation() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + tsParseTypeAnnotation(); + } +} exports.tsTryParseTypeAnnotation = tsTryParseTypeAnnotation; + +function tsTryParseType() { + if (_index.eat.call(void 0, _types.TokenType.colon)) { + tsParseType(); + } +} + +function tsParseTypePredicatePrefix() { + const snapshot = _base.state.snapshot(); + _expression.parseIdentifier.call(void 0, ); + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._is) && !_util.hasPrecedingLineBreak.call(void 0, )) { + _index.next.call(void 0, ); + } else { + _base.state.restoreFromSnapshot(snapshot); + } +} + + function tsParseTypeAnnotation() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _util.expect.call(void 0, _types.TokenType.colon); + tsParseType(); + _index.popTypeContext.call(void 0, oldIsType); +} exports.tsParseTypeAnnotation = tsParseTypeAnnotation; + + function tsParseType() { + tsParseNonConditionalType(); + if (_util.hasPrecedingLineBreak.call(void 0, ) || !_index.eat.call(void 0, _types.TokenType._extends)) { + return; + } + // extends type + tsParseNonConditionalType(); + _util.expect.call(void 0, _types.TokenType.question); + // true type + tsParseType(); + _util.expect.call(void 0, _types.TokenType.colon); + // false type + tsParseType(); +} exports.tsParseType = tsParseType; + + function tsParseNonConditionalType() { + if (tsIsStartOfFunctionType()) { + tsParseFunctionOrConstructorType(FunctionType.TSFunctionType); + return; + } + if (_index.match.call(void 0, _types.TokenType._new)) { + // As in `new () => Date` + tsParseFunctionOrConstructorType(FunctionType.TSConstructorType); + return; + } + tsParseUnionTypeOrHigher(); +} exports.tsParseNonConditionalType = tsParseNonConditionalType; + + function tsParseTypeAssertion() { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + tsParseType(); + _util.expect.call(void 0, _types.TokenType.greaterThan); + _index.popTypeContext.call(void 0, oldIsType); + _expression.parseMaybeUnary.call(void 0, ); +} exports.tsParseTypeAssertion = tsParseTypeAssertion; + + function tsTryParseJSXTypeArgument() { + if (_index.eat.call(void 0, _types.TokenType.jsxTagStart)) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType.typeParameterStart; + const oldIsType = _index.pushTypeContext.call(void 0, 1); + while (!_index.match.call(void 0, _types.TokenType.greaterThan) && !_base.state.error) { + tsParseType(); + _index.eat.call(void 0, _types.TokenType.comma); + } + // Process >, but the one after needs to be parsed JSX-style. 
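// For instance (an assumed example, not from the sucrase source), in
//   const list = <List<Item> items={items} />;
// the `>` closing `<Item>` ends the type arguments, and the token after it
// has to be read with the JSX tokenizer rather than the normal one.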
+ _jsx.nextJSXTagToken.call(void 0, ); + _index.popTypeContext.call(void 0, oldIsType); + } +} exports.tsTryParseJSXTypeArgument = tsTryParseJSXTypeArgument; + +function tsParseHeritageClause() { + while (!_index.match.call(void 0, _types.TokenType.braceL) && !_base.state.error) { + tsParseExpressionWithTypeArguments(); + _index.eat.call(void 0, _types.TokenType.comma); + } +} + +function tsParseExpressionWithTypeArguments() { + // Note: TS uses parseLeftHandSideExpressionOrHigher, + // then has grammar errors later if it's not an EntityName. + tsParseEntityName(); + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseInterfaceDeclaration() { + _lval.parseBindingIdentifier.call(void 0, false); + tsTryParseTypeParameters(); + if (_index.eat.call(void 0, _types.TokenType._extends)) { + tsParseHeritageClause(); + } + tsParseObjectTypeMembers(); +} + +function tsParseTypeAliasDeclaration() { + _lval.parseBindingIdentifier.call(void 0, false); + tsTryParseTypeParameters(); + _util.expect.call(void 0, _types.TokenType.eq); + tsParseType(); + _util.semicolon.call(void 0, ); +} + +function tsParseEnumMember() { + // Computed property names are grammar errors in an enum, so accept just string literal or identifier. + if (_index.match.call(void 0, _types.TokenType.string)) { + _expression.parseLiteral.call(void 0, ); + } else { + _expression.parseIdentifier.call(void 0, ); + } + if (_index.eat.call(void 0, _types.TokenType.eq)) { + const eqIndex = _base.state.tokens.length - 1; + _expression.parseMaybeAssign.call(void 0, ); + _base.state.tokens[eqIndex].rhsEndIndex = _base.state.tokens.length; + } +} + +function tsParseEnumDeclaration() { + _lval.parseBindingIdentifier.call(void 0, false); + _util.expect.call(void 0, _types.TokenType.braceL); + while (!_index.eat.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + tsParseEnumMember(); + _index.eat.call(void 0, _types.TokenType.comma); + } +} + +function tsParseModuleBlock() { + _util.expect.call(void 0, _types.TokenType.braceL); + _statement.parseBlockBody.call(void 0, /* end */ _types.TokenType.braceR); +} + +function tsParseModuleOrNamespaceDeclaration() { + _lval.parseBindingIdentifier.call(void 0, false); + if (_index.eat.call(void 0, _types.TokenType.dot)) { + tsParseModuleOrNamespaceDeclaration(); + } else { + tsParseModuleBlock(); + } +} + +function tsParseAmbientExternalModuleDeclaration() { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._global)) { + _expression.parseIdentifier.call(void 0, ); + } else if (_index.match.call(void 0, _types.TokenType.string)) { + _expression.parseExprAtom.call(void 0, ); + } else { + _util.unexpected.call(void 0, ); + } + + if (_index.match.call(void 0, _types.TokenType.braceL)) { + tsParseModuleBlock(); + } else { + _util.semicolon.call(void 0, ); + } +} + + function tsParseImportEqualsDeclaration() { + _lval.parseImportedIdentifier.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.eq); + tsParseModuleReference(); + _util.semicolon.call(void 0, ); +} exports.tsParseImportEqualsDeclaration = tsParseImportEqualsDeclaration; + +function tsIsExternalModuleReference() { + return _util.isContextual.call(void 0, _keywords.ContextualKeyword._require) && _index.lookaheadType.call(void 0, ) === _types.TokenType.parenL; +} + +function tsParseModuleReference() { + if (tsIsExternalModuleReference()) { + tsParseExternalModuleReference(); + } else { + tsParseEntityName(); + } +} + +function 
tsParseExternalModuleReference() { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._require); + _util.expect.call(void 0, _types.TokenType.parenL); + if (!_index.match.call(void 0, _types.TokenType.string)) { + _util.unexpected.call(void 0, ); + } + _expression.parseLiteral.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.parenR); +} + +// Utilities + +// Returns true if a statement matched. +function tsTryParseDeclare() { + if (_util.isLineTerminator.call(void 0, )) { + return false; + } + switch (_base.state.type) { + case _types.TokenType._function: { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + _index.next.call(void 0, ); + // We don't need to precisely get the function start here, since it's only used to mark + // the function as a type if it's bodiless, and it's already a type here. + const functionStart = _base.state.start; + _statement.parseFunction.call(void 0, functionStart, /* isStatement */ true); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + case _types.TokenType._class: { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + _statement.parseClass.call(void 0, /* isStatement */ true, /* optionalId */ false); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + case _types.TokenType._const: { + if (_index.match.call(void 0, _types.TokenType._const) && _util.isLookaheadContextual.call(void 0, _keywords.ContextualKeyword._enum)) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + // `const enum = 0;` not allowed because "enum" is a strict mode reserved word. + _util.expect.call(void 0, _types.TokenType._const); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._enum); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._enum; + tsParseEnumDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + } + // falls through + case _types.TokenType._var: + case _types.TokenType._let: { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + _statement.parseVarStatement.call(void 0, _base.state.type); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + case _types.TokenType.name: { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + const contextualKeyword = _base.state.contextualKeyword; + let matched = false; + if (contextualKeyword === _keywords.ContextualKeyword._global) { + tsParseAmbientExternalModuleDeclaration(); + matched = true; + } else { + matched = tsParseDeclaration(contextualKeyword, /* isBeforeToken */ true); + } + _index.popTypeContext.call(void 0, oldIsType); + return matched; + } + default: + return false; + } +} + +// Note: this won't be called unless the keyword is allowed in `shouldParseExportDeclaration`. +// Returns true if it matched a declaration. +function tsTryParseExportDeclaration() { + return tsParseDeclaration(_base.state.contextualKeyword, /* isBeforeToken */ true); +} + +// Returns true if it matched a statement. +function tsParseExpressionStatement(contextualKeyword) { + switch (contextualKeyword) { + case _keywords.ContextualKeyword._declare: { + const declareTokenIndex = _base.state.tokens.length - 1; + const matched = tsTryParseDeclare(); + if (matched) { + _base.state.tokens[declareTokenIndex].type = _types.TokenType._declare; + return true; + } + break; + } + case _keywords.ContextualKeyword._global: + // `global { }` (with no `declare`) may appear inside an ambient module declaration. 
+ // Would like to use tsParseAmbientExternalModuleDeclaration here, but already ran past "global". + if (_index.match.call(void 0, _types.TokenType.braceL)) { + tsParseModuleBlock(); + return true; + } + break; + + default: + return tsParseDeclaration(contextualKeyword, /* isBeforeToken */ false); + } + return false; +} + +// Common to tsTryParseDeclare, tsTryParseExportDeclaration, and tsParseExpressionStatement. +// Returns true if it matched a declaration. +function tsParseDeclaration(contextualKeyword, isBeforeToken) { + switch (contextualKeyword) { + case _keywords.ContextualKeyword._abstract: + if (tsCheckLineTerminatorAndMatch(_types.TokenType._class, isBeforeToken)) { + if (isBeforeToken) _index.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._abstract; + _statement.parseClass.call(void 0, /* isStatement */ true, /* optionalId */ false); + return true; + } + break; + + case _keywords.ContextualKeyword._enum: + if (tsCheckLineTerminatorAndMatch(_types.TokenType.name, isBeforeToken)) { + if (isBeforeToken) _index.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._enum; + tsParseEnumDeclaration(); + return true; + } + break; + + case _keywords.ContextualKeyword._interface: + if (tsCheckLineTerminatorAndMatch(_types.TokenType.name, isBeforeToken)) { + // `next` is true in "export" and "declare" contexts, so we want to remove that token + // as well. + const oldIsType = _index.pushTypeContext.call(void 0, 1); + if (isBeforeToken) _index.next.call(void 0, ); + tsParseInterfaceDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + break; + + case _keywords.ContextualKeyword._module: + if (isBeforeToken) _index.next.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.string)) { + const oldIsType = _index.pushTypeContext.call(void 0, isBeforeToken ? 2 : 1); + tsParseAmbientExternalModuleDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } else if (tsCheckLineTerminatorAndMatch(_types.TokenType.name, isBeforeToken)) { + const oldIsType = _index.pushTypeContext.call(void 0, isBeforeToken ? 2 : 1); + if (isBeforeToken) _index.next.call(void 0, ); + tsParseModuleOrNamespaceDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + break; + + case _keywords.ContextualKeyword._namespace: + if (tsCheckLineTerminatorAndMatch(_types.TokenType.name, isBeforeToken)) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + if (isBeforeToken) _index.next.call(void 0, ); + tsParseModuleOrNamespaceDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + break; + + case _keywords.ContextualKeyword._type: + if (tsCheckLineTerminatorAndMatch(_types.TokenType.name, isBeforeToken)) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + if (isBeforeToken) _index.next.call(void 0, ); + tsParseTypeAliasDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + break; + + default: + break; + } + return false; +} + +function tsCheckLineTerminatorAndMatch(tokenType, isBeforeToken) { + return !_util.isLineTerminator.call(void 0, ) && (isBeforeToken || _index.match.call(void 0, tokenType)); +} + +// Returns true if there was a generic async arrow function. 
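// For instance (an assumed example, not from the sucrase source):
//   const wrap = async <T>(value: T): Promise<T[]> => [value];
// is a generic async arrow function, while something like `async < limit` is not;
// the snapshot taken below lets the speculative parse be rolled back.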
+function tsTryParseGenericAsyncArrowFunction() { + const snapshot = _base.state.snapshot(); + + tsParseTypeParameters(); + _statement.parseFunctionParams.call(void 0, ); + tsTryParseTypeOrTypePredicateAnnotation(); + _util.expect.call(void 0, _types.TokenType.arrow); + + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + return false; + } + + _expression.parseFunctionBody.call(void 0, true); + return true; +} + +function tsParseTypeArguments() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _util.expect.call(void 0, _types.TokenType.lessThan); + while (!_index.eat.call(void 0, _types.TokenType.greaterThan) && !_base.state.error) { + tsParseType(); + _index.eat.call(void 0, _types.TokenType.comma); + } + _index.popTypeContext.call(void 0, oldIsType); +} + + function tsIsDeclarationStart() { + if (_index.match.call(void 0, _types.TokenType.name)) { + switch (_base.state.contextualKeyword) { + case _keywords.ContextualKeyword._abstract: + case _keywords.ContextualKeyword._declare: + case _keywords.ContextualKeyword._enum: + case _keywords.ContextualKeyword._interface: + case _keywords.ContextualKeyword._module: + case _keywords.ContextualKeyword._namespace: + case _keywords.ContextualKeyword._type: + return true; + default: + break; + } + } + + return false; +} exports.tsIsDeclarationStart = tsIsDeclarationStart; + +// ====================================================== +// OVERRIDES +// ====================================================== + + function tsParseFunctionBodyAndFinish(functionStart, funcContextId) { + // For arrow functions, `parseArrow` handles the return type itself. + if (_index.match.call(void 0, _types.TokenType.colon)) { + tsParseTypeOrTypePredicateAnnotation(_types.TokenType.colon); + } + + // The original code checked the node type to make sure this function type allows a missing + // body, but we skip that to avoid sending around the node type. We instead just use the + // allowExpressionBody boolean to make sure it's not an arrow function. + if (!_index.match.call(void 0, _types.TokenType.braceL) && _util.isLineTerminator.call(void 0, )) { + // Retroactively mark the function declaration as a type. + let i = _base.state.tokens.length - 1; + while ( + i >= 0 && + (_base.state.tokens[i].start >= functionStart || + _base.state.tokens[i].type === _types.TokenType._default || + _base.state.tokens[i].type === _types.TokenType._export) + ) { + _base.state.tokens[i].isType = true; + i--; + } + return; + } + + _expression.parseFunctionBody.call(void 0, false, funcContextId); +} exports.tsParseFunctionBodyAndFinish = tsParseFunctionBodyAndFinish; + + function tsParseSubscript(startPos, noCalls, stopState) { + if (!_util.hasPrecedingLineBreak.call(void 0, ) && _index.eat.call(void 0, _types.TokenType.bang)) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType.nonNullAssertion; + return; + } + + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + // There are number of things we are going to "maybe" parse, like type arguments on + // tagged template expressions. If any of them fail, walk it back and continue. + const snapshot = _base.state.snapshot(); + + if (!noCalls && _expression.atPossibleAsync.call(void 0, )) { + // Almost certainly this is a generic async function `async () => ... 
+ // But it might be a call with a type argument `async();` + const asyncArrowFn = tsTryParseGenericAsyncArrowFunction(); + if (asyncArrowFn) { + return; + } + } + tsParseTypeArguments(); + if (!noCalls && _index.eat.call(void 0, _types.TokenType.parenL)) { + _expression.parseCallExpressionArguments.call(void 0, ); + } else if (_index.match.call(void 0, _types.TokenType.backQuote)) { + // Tagged template with a type argument. + _expression.parseTemplate.call(void 0, ); + } else { + _util.unexpected.call(void 0, ); + } + + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } else { + return; + } + } + _expression.baseParseSubscript.call(void 0, startPos, noCalls, stopState); +} exports.tsParseSubscript = tsParseSubscript; + + function tsStartParseNewArguments() { + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + // 99% certain this is `new C();`. But may be `new C < T;`, which is also legal. + const snapshot = _base.state.snapshot(); + + _base.state.type = _types.TokenType.typeParameterStart; + tsParseTypeArguments(); + if (!_index.match.call(void 0, _types.TokenType.parenL)) { + _util.unexpected.call(void 0, ); + } + + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } + } +} exports.tsStartParseNewArguments = tsStartParseNewArguments; + + function tsTryParseExport() { + if (_index.match.call(void 0, _types.TokenType._import)) { + // `export import A = B;` + _util.expect.call(void 0, _types.TokenType._import); + tsParseImportEqualsDeclaration(); + return true; + } else if (_index.eat.call(void 0, _types.TokenType.eq)) { + // `export = x;` + _expression.parseExpression.call(void 0, ); + _util.semicolon.call(void 0, ); + return true; + } else if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._as)) { + // `export as namespace A;` + // See `parseNamespaceExportDeclaration` in TypeScript's own parser + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._namespace); + _expression.parseIdentifier.call(void 0, ); + _util.semicolon.call(void 0, ); + return true; + } else { + return false; + } +} exports.tsTryParseExport = tsTryParseExport; + + function tsTryParseExportDefaultExpression() { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._abstract) && _index.lookaheadType.call(void 0, ) === _types.TokenType._class) { + _base.state.type = _types.TokenType._abstract; + _index.next.call(void 0, ); // Skip "abstract" + _statement.parseClass.call(void 0, true, true); + return true; + } + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._interface)) { + // Make sure "export default" are considered type tokens so the whole thing is removed. 
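// For example (an assumed example, not from the sucrase source):
//   export default interface Props { id: number; }
// is purely a type declaration, so `export default` is marked as type tokens and
// the entire statement can be elided from the emitted JavaScript.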
+ const oldIsType = _index.pushTypeContext.call(void 0, 2); + tsParseDeclaration(_keywords.ContextualKeyword._interface, true); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + return false; +} exports.tsTryParseExportDefaultExpression = tsTryParseExportDefaultExpression; + + function tsTryParseStatementContent() { + if (_base.state.type === _types.TokenType._const) { + const ahead = _index.lookaheadTypeAndKeyword.call(void 0, ); + if (ahead.type === _types.TokenType.name && ahead.contextualKeyword === _keywords.ContextualKeyword._enum) { + _util.expect.call(void 0, _types.TokenType._const); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._enum); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._enum; + tsParseEnumDeclaration(); + return true; + } + } + return false; +} exports.tsTryParseStatementContent = tsTryParseStatementContent; + + function tsParseAccessModifier() { + tsParseModifier([ + _keywords.ContextualKeyword._public, + _keywords.ContextualKeyword._protected, + _keywords.ContextualKeyword._private, + ]); +} exports.tsParseAccessModifier = tsParseAccessModifier; + + function tsTryParseClassMemberWithIsStatic( + isStatic, + classContextId, +) { + let isAbstract = false; + let isReadonly = false; + + const mod = tsParseModifier([_keywords.ContextualKeyword._abstract, _keywords.ContextualKeyword._readonly]); + switch (mod) { + case _keywords.ContextualKeyword._readonly: + isReadonly = true; + isAbstract = !!tsParseModifier([_keywords.ContextualKeyword._abstract]); + break; + case _keywords.ContextualKeyword._abstract: + isAbstract = true; + isReadonly = !!tsParseModifier([_keywords.ContextualKeyword._readonly]); + break; + default: + break; + } + + // We no longer check for public/private/etc, but tsTryParseIndexSignature should just return + // false in that case for valid code. + if (!isAbstract && !isStatic) { + const found = tsTryParseIndexSignature(); + if (found) { + return true; + } + } + + if (isReadonly) { + // Must be a property (if not an index signature). + _statement.parseClassPropertyName.call(void 0, classContextId); + _statement.parsePostMemberNameModifiers.call(void 0, ); + _statement.parseClassProperty.call(void 0, ); + return true; + } + return false; +} exports.tsTryParseClassMemberWithIsStatic = tsTryParseClassMemberWithIsStatic; + +// Note: The reason we do this in `parseIdentifierStatement` and not `parseStatement` +// is that e.g. `type()` is valid JS, so we must try parsing that first. +// If it's really a type, we will parse `type` as the statement, and can correct it here +// by parsing the rest. + function tsParseIdentifierStatement(contextualKeyword) { + const matched = tsParseExpressionStatement(contextualKeyword); + if (!matched) { + _util.semicolon.call(void 0, ); + } +} exports.tsParseIdentifierStatement = tsParseIdentifierStatement; + + function tsParseExportDeclaration() { + // "export declare" is equivalent to just "export". 
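// For example (an assumed example, not from the sucrase source):
//   export declare function parse(input: string): unknown;
// is parsed like a normal exported declaration, with the `declare`d statement
// treated as type-only so it produces no runtime output.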
+ const isDeclare = _util.eatContextual.call(void 0, _keywords.ContextualKeyword._declare); + if (isDeclare) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._declare; + } + + let matchedDeclaration = false; + if (_index.match.call(void 0, _types.TokenType.name)) { + if (isDeclare) { + const oldIsType = _index.pushTypeContext.call(void 0, 2); + matchedDeclaration = tsTryParseExportDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + } else { + matchedDeclaration = tsTryParseExportDeclaration(); + } + } + if (!matchedDeclaration) { + if (isDeclare) { + const oldIsType = _index.pushTypeContext.call(void 0, 2); + _statement.parseStatement.call(void 0, true); + _index.popTypeContext.call(void 0, oldIsType); + } else { + _statement.parseStatement.call(void 0, true); + } + } +} exports.tsParseExportDeclaration = tsParseExportDeclaration; + + function tsAfterParseClassSuper(hasSuper) { + if (hasSuper && _index.match.call(void 0, _types.TokenType.lessThan)) { + tsParseTypeArguments(); + } + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._implements)) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._implements; + const oldIsType = _index.pushTypeContext.call(void 0, 1); + tsParseHeritageClause(); + _index.popTypeContext.call(void 0, oldIsType); + } +} exports.tsAfterParseClassSuper = tsAfterParseClassSuper; + + function tsStartParseObjPropValue() { + tsTryParseTypeParameters(); +} exports.tsStartParseObjPropValue = tsStartParseObjPropValue; + + function tsStartParseFunctionParams() { + tsTryParseTypeParameters(); +} exports.tsStartParseFunctionParams = tsStartParseFunctionParams; + +// `let x: number;` + function tsAfterParseVarHead() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _index.eat.call(void 0, _types.TokenType.bang); + tsTryParseTypeAnnotation(); + _index.popTypeContext.call(void 0, oldIsType); +} exports.tsAfterParseVarHead = tsAfterParseVarHead; + +// parse the return type of an async arrow function - let foo = (async (): number => {}); + function tsStartParseAsyncArrowFromCallExpression() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + tsParseTypeAnnotation(); + } +} exports.tsStartParseAsyncArrowFromCallExpression = tsStartParseAsyncArrowFromCallExpression; + +// Returns true if the expression was an arrow function. + function tsParseMaybeAssign(noIn, isWithinParens) { + // Note: When the JSX plugin is on, type assertions (` x`) aren't valid syntax. + if (_base.isJSXEnabled) { + return tsParseMaybeAssignWithJSX(noIn, isWithinParens); + } else { + return tsParseMaybeAssignWithoutJSX(noIn, isWithinParens); + } +} exports.tsParseMaybeAssign = tsParseMaybeAssign; + + function tsParseMaybeAssignWithJSX(noIn, isWithinParens) { + if (!_index.match.call(void 0, _types.TokenType.lessThan)) { + return _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + } + + // Prefer to parse JSX if possible. But may be an arrow fn. + const snapshot = _base.state.snapshot(); + let wasArrow = _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } else { + return wasArrow; + } + + // Otherwise, try as type-parameterized arrow function. + _base.state.type = _types.TokenType.typeParameterStart; + // This is similar to TypeScript's `tryParseParenthesizedArrowFunctionExpression`. 
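// At this point the JSX attempt has failed and been rolled back, so the `<` is
// retried as a type parameter list. Assumed examples (not from the sucrase source):
//   const el = <div className="row" />;       // succeeds on the first, JSX attempt
//   const id = <T,>(value: T): T => value;    // fails as JSX, then parses as a generic arrow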
+ tsParseTypeParameters(); + wasArrow = _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + if (!wasArrow) { + _util.unexpected.call(void 0, ); + } + + return wasArrow; +} exports.tsParseMaybeAssignWithJSX = tsParseMaybeAssignWithJSX; + + function tsParseMaybeAssignWithoutJSX(noIn, isWithinParens) { + if (!_index.match.call(void 0, _types.TokenType.lessThan)) { + return _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + } + + const snapshot = _base.state.snapshot(); + // This is similar to TypeScript's `tryParseParenthesizedArrowFunctionExpression`. + tsParseTypeParameters(); + const wasArrow = _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + if (!wasArrow) { + _util.unexpected.call(void 0, ); + } + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } else { + return wasArrow; + } + + // Try parsing a type cast instead of an arrow function. + // This will start with a type assertion (via parseMaybeUnary). + // But don't directly call `tsParseTypeAssertion` because we want to handle any binary after it. + return _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); +} exports.tsParseMaybeAssignWithoutJSX = tsParseMaybeAssignWithoutJSX; + + function tsParseArrow() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + // This is different from how the TS parser does it. + // TS uses lookahead. Babylon parses it as a parenthesized expression and converts. + const snapshot = _base.state.snapshot(); + + tsParseTypeOrTypePredicateAnnotation(_types.TokenType.colon); + if (_util.canInsertSemicolon.call(void 0, )) _util.unexpected.call(void 0, ); + if (!_index.match.call(void 0, _types.TokenType.arrow)) _util.unexpected.call(void 0, ); + + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } + } + return _index.eat.call(void 0, _types.TokenType.arrow); +} exports.tsParseArrow = tsParseArrow; + +// Allow type annotations inside of a parameter list. 
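// For example (an assumed example, not from the sucrase source), in
//   function greet(name?: string, count: number = 1) {}
// the `?` marker and the `: type` annotation on each parameter are consumed here
// inside a type context so they can later be stripped from the output.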
+ function tsParseAssignableListItemTypes() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _index.eat.call(void 0, _types.TokenType.question); + tsTryParseTypeAnnotation(); + _index.popTypeContext.call(void 0, oldIsType); +} exports.tsParseAssignableListItemTypes = tsParseAssignableListItemTypes; + + function tsParseMaybeDecoratorArguments() { + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + tsParseTypeArguments(); + } + _statement.baseParseMaybeDecoratorArguments.call(void 0, ); +} exports.tsParseMaybeDecoratorArguments = tsParseMaybeDecoratorArguments; diff --git a/node_modules/sucrase/dist/parser/plugins/typescript.mjs b/node_modules/sucrase/dist/parser/plugins/typescript.mjs new file mode 100644 index 00000000..eedbded2 --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/typescript.mjs @@ -0,0 +1,1365 @@ +import { + eat, + lookaheadType, + lookaheadTypeAndKeyword, + match, + next, + popTypeContext, + pushTypeContext, +} from "../tokenizer/index"; +import {ContextualKeyword} from "../tokenizer/keywords"; +import {TokenType, TokenType as tt} from "../tokenizer/types"; +import {isJSXEnabled, state} from "../traverser/base"; +import { + atPossibleAsync, + baseParseMaybeAssign, + baseParseSubscript, + parseCallExpressionArguments, + parseExprAtom, + parseExpression, + parseFunctionBody, + parseIdentifier, + parseLiteral, + parseMaybeAssign, + parseMaybeUnary, + parsePropertyName, + parseTemplate, + +} from "../traverser/expression"; +import {parseBindingIdentifier, parseBindingList, parseImportedIdentifier} from "../traverser/lval"; +import { + baseParseMaybeDecoratorArguments, + parseBlockBody, + parseClass, + parseClassProperty, + parseClassPropertyName, + parseFunction, + parseFunctionParams, + parsePostMemberNameModifiers, + parseStatement, + parseVarStatement, +} from "../traverser/statement"; +import { + canInsertSemicolon, + eatContextual, + expect, + expectContextual, + hasPrecedingLineBreak, + isContextual, + isLineTerminator, + isLookaheadContextual, + semicolon, + unexpected, +} from "../traverser/util"; +import {nextJSXTagToken} from "./jsx"; + +function tsIsIdentifier() { + // TODO: actually a bit more complex in TypeScript, but shouldn't matter. + // See https://github.com/Microsoft/TypeScript/issues/15008 + return match(tt.name); +} + +function tsNextTokenCanFollowModifier() { + // Note: TypeScript's implementation is much more complicated because + // more things are considered modifiers there. + // This implementation only handles modifiers not handled by babylon itself. And "static". + // TODO: Would be nice to avoid lookahead. Want a hasLineBreakUpNext() method... + const snapshot = state.snapshot(); + + next(); + const canFollowModifier = + !hasPrecedingLineBreak() && + !match(tt.parenL) && + !match(tt.parenR) && + !match(tt.colon) && + !match(tt.eq) && + !match(tt.question) && + !match(tt.bang); + + if (canFollowModifier) { + return true; + } else { + state.restoreFromSnapshot(snapshot); + return false; + } +} + +/** Parses a modifier matching one the given modifier names. 
*/ +export function tsParseModifier( + allowedModifiers, +) { + if (!match(tt.name)) { + return null; + } + + const modifier = state.contextualKeyword; + if (allowedModifiers.indexOf(modifier) !== -1 && tsNextTokenCanFollowModifier()) { + switch (modifier) { + case ContextualKeyword._readonly: + state.tokens[state.tokens.length - 1].type = tt._readonly; + break; + case ContextualKeyword._abstract: + state.tokens[state.tokens.length - 1].type = tt._abstract; + break; + case ContextualKeyword._static: + state.tokens[state.tokens.length - 1].type = tt._static; + break; + case ContextualKeyword._public: + state.tokens[state.tokens.length - 1].type = tt._public; + break; + case ContextualKeyword._private: + state.tokens[state.tokens.length - 1].type = tt._private; + break; + case ContextualKeyword._protected: + state.tokens[state.tokens.length - 1].type = tt._protected; + break; + default: + break; + } + return modifier; + } + return null; +} + +function tsParseEntityName() { + parseIdentifier(); + while (eat(tt.dot)) { + parseIdentifier(); + } +} + +function tsParseTypeReference() { + tsParseEntityName(); + if (!hasPrecedingLineBreak() && match(tt.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseThisTypePredicate() { + next(); + tsParseTypeAnnotation(); +} + +function tsParseThisTypeNode() { + next(); +} + +function tsParseTypeQuery() { + expect(tt._typeof); + if (match(tt._import)) { + tsParseImportType(); + } else { + tsParseEntityName(); + } +} + +function tsParseImportType() { + expect(tt._import); + expect(tt.parenL); + expect(tt.string); + expect(tt.parenR); + if (eat(tt.dot)) { + tsParseEntityName(); + } + if (match(tt.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseTypeParameter() { + parseIdentifier(); + if (eat(tt._extends)) { + tsParseType(); + } + if (eat(tt.eq)) { + tsParseType(); + } +} + +export function tsTryParseTypeParameters() { + if (match(tt.lessThan)) { + tsParseTypeParameters(); + } +} + +function tsParseTypeParameters() { + const oldIsType = pushTypeContext(0); + if (match(tt.lessThan) || match(tt.typeParameterStart)) { + next(); + } else { + unexpected(); + } + + while (!eat(tt.greaterThan) && !state.error) { + tsParseTypeParameter(); + eat(tt.comma); + } + popTypeContext(oldIsType); +} + +// Note: In TypeScript implementation we must provide `yieldContext` and `awaitContext`, +// but here it's always false, because this is only used for types. +function tsFillSignature(returnToken) { + // Arrow fns *must* have return token (`=>`). Normal functions can omit it. + const returnTokenRequired = returnToken === tt.arrow; + tsTryParseTypeParameters(); + expect(tt.parenL); + // Create a scope even though we're doing type parsing so we don't accidentally + // treat params as top-level bindings. 
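// For example (an assumed example, not from the sucrase source), in
//   type Handler = (event: MouseEvent) => void;
// the parameter `event` only exists inside the type signature, so the temporary
// scope below keeps it from being registered as a real binding.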
+ state.scopeDepth++; + tsParseBindingListForSignature(false /* isBlockScope */); + state.scopeDepth--; + if (returnTokenRequired) { + tsParseTypeOrTypePredicateAnnotation(returnToken); + } else if (match(returnToken)) { + tsParseTypeOrTypePredicateAnnotation(returnToken); + } +} + +function tsParseBindingListForSignature(isBlockScope) { + parseBindingList(tt.parenR, isBlockScope); +} + +function tsParseTypeMemberSemicolon() { + if (!eat(tt.comma)) { + semicolon(); + } +} + +var SignatureMemberKind; (function (SignatureMemberKind) { + const TSCallSignatureDeclaration = 0; SignatureMemberKind[SignatureMemberKind["TSCallSignatureDeclaration"] = TSCallSignatureDeclaration] = "TSCallSignatureDeclaration"; + const TSConstructSignatureDeclaration = TSCallSignatureDeclaration + 1; SignatureMemberKind[SignatureMemberKind["TSConstructSignatureDeclaration"] = TSConstructSignatureDeclaration] = "TSConstructSignatureDeclaration"; +})(SignatureMemberKind || (SignatureMemberKind = {})); + +function tsParseSignatureMember(kind) { + if (kind === SignatureMemberKind.TSConstructSignatureDeclaration) { + expect(tt._new); + } + tsFillSignature(tt.colon); + tsParseTypeMemberSemicolon(); +} + +function tsIsUnambiguouslyIndexSignature() { + const snapshot = state.snapshot(); + next(); // Skip '{' + const isIndexSignature = eat(tt.name) && match(tt.colon); + state.restoreFromSnapshot(snapshot); + return isIndexSignature; +} + +function tsTryParseIndexSignature() { + if (!(match(tt.bracketL) && tsIsUnambiguouslyIndexSignature())) { + return false; + } + + const oldIsType = pushTypeContext(0); + + expect(tt.bracketL); + parseIdentifier(); + tsParseTypeAnnotation(); + expect(tt.bracketR); + + tsTryParseTypeAnnotation(); + tsParseTypeMemberSemicolon(); + + popTypeContext(oldIsType); + return true; +} + +function tsParsePropertyOrMethodSignature(isReadonly) { + parsePropertyName(-1 /* Types don't need context IDs. 
*/); + eat(tt.question); + + if (!isReadonly && (match(tt.parenL) || match(tt.lessThan))) { + tsFillSignature(tt.colon); + tsParseTypeMemberSemicolon(); + } else { + tsTryParseTypeAnnotation(); + tsParseTypeMemberSemicolon(); + } +} + +function tsParseTypeMember() { + if (match(tt.parenL) || match(tt.lessThan)) { + tsParseSignatureMember(SignatureMemberKind.TSCallSignatureDeclaration); + return; + } + if (match(tt._new) && tsIsStartOfConstructSignature()) { + tsParseSignatureMember(SignatureMemberKind.TSConstructSignatureDeclaration); + return; + } + const readonly = !!tsParseModifier([ContextualKeyword._readonly]); + + const found = tsTryParseIndexSignature(); + if (found) { + return; + } + tsParsePropertyOrMethodSignature(readonly); +} + +function tsIsStartOfConstructSignature() { + const lookahead = lookaheadType(); + return lookahead === tt.parenL || lookahead === tt.lessThan; +} + +function tsParseTypeLiteral() { + tsParseObjectTypeMembers(); +} + +function tsParseObjectTypeMembers() { + expect(tt.braceL); + while (!eat(tt.braceR) && !state.error) { + tsParseTypeMember(); + } +} + +function tsLookaheadIsStartOfMappedType() { + const snapshot = state.snapshot(); + const isStartOfMappedType = tsIsStartOfMappedType(); + state.restoreFromSnapshot(snapshot); + return isStartOfMappedType; +} + +function tsIsStartOfMappedType() { + next(); + if (eat(tt.plus) || eat(tt.minus)) { + return isContextual(ContextualKeyword._readonly); + } + if (isContextual(ContextualKeyword._readonly)) { + next(); + } + if (!match(tt.bracketL)) { + return false; + } + next(); + if (!tsIsIdentifier()) { + return false; + } + next(); + return match(tt._in); +} + +function tsParseMappedTypeParameter() { + parseIdentifier(); + expect(tt._in); + tsParseType(); +} + +function tsParseMappedType() { + expect(tt.braceL); + if (match(tt.plus) || match(tt.minus)) { + next(); + expectContextual(ContextualKeyword._readonly); + } else { + eatContextual(ContextualKeyword._readonly); + } + expect(tt.bracketL); + tsParseMappedTypeParameter(); + expect(tt.bracketR); + if (match(tt.plus) || match(tt.minus)) { + next(); + expect(tt.question); + } else { + eat(tt.question); + } + tsTryParseType(); + semicolon(); + expect(tt.braceR); +} + +function tsParseTupleType() { + expect(tt.bracketL); + while (!eat(tt.bracketR) && !state.error) { + tsParseTupleElementType(); + eat(tt.comma); + } +} + +function tsParseTupleElementType() { + // parses `...TsType[]` + if (eat(tt.ellipsis)) { + tsParseType(); + return; + } + // parses `TsType?` + tsParseType(); + eat(tt.question); +} + +function tsParseParenthesizedType() { + expect(tt.parenL); + tsParseType(); + expect(tt.parenR); +} + +var FunctionType; (function (FunctionType) { + const TSFunctionType = 0; FunctionType[FunctionType["TSFunctionType"] = TSFunctionType] = "TSFunctionType"; + const TSConstructorType = TSFunctionType + 1; FunctionType[FunctionType["TSConstructorType"] = TSConstructorType] = "TSConstructorType"; +})(FunctionType || (FunctionType = {})); + +function tsParseFunctionOrConstructorType(type) { + if (type === FunctionType.TSConstructorType) { + expect(tt._new); + } + tsFillSignature(tt.arrow); +} + +function tsParseNonArrayType() { + switch (state.type) { + case tt.name: + tsParseTypeReference(); + return; + case tt._void: + case tt._null: + next(); + return; + case tt.string: + case tt.num: + case tt._true: + case tt._false: + parseLiteral(); + return; + case tt.minus: + next(); + parseLiteral(); + return; + case tt._this: { + tsParseThisTypeNode(); + if 
(isContextual(ContextualKeyword._is) && !hasPrecedingLineBreak()) { + tsParseThisTypePredicate(); + } + return; + } + case tt._typeof: + tsParseTypeQuery(); + return; + case tt._import: + tsParseImportType(); + return; + case tt.braceL: + if (tsLookaheadIsStartOfMappedType()) { + tsParseMappedType(); + } else { + tsParseTypeLiteral(); + } + return; + case tt.bracketL: + tsParseTupleType(); + return; + case tt.parenL: + tsParseParenthesizedType(); + return; + case tt.backQuote: + parseTemplate(); + return; + default: + if (state.type & TokenType.IS_KEYWORD) { + next(); + state.tokens[state.tokens.length - 1].type = tt.name; + return; + } + break; + } + + unexpected(); +} + +function tsParseArrayTypeOrHigher() { + tsParseNonArrayType(); + while (!hasPrecedingLineBreak() && eat(tt.bracketL)) { + if (!eat(tt.bracketR)) { + // If we hit ] immediately, this is an array type, otherwise it's an indexed access type. + tsParseType(); + expect(tt.bracketR); + } + } +} + +function tsParseInferType() { + expectContextual(ContextualKeyword._infer); + parseIdentifier(); +} + +function tsParseTypeOperatorOrHigher() { + if ( + isContextual(ContextualKeyword._keyof) || + isContextual(ContextualKeyword._unique) || + isContextual(ContextualKeyword._readonly) + ) { + next(); + tsParseTypeOperatorOrHigher(); + } else if (isContextual(ContextualKeyword._infer)) { + tsParseInferType(); + } else { + tsParseArrayTypeOrHigher(); + } +} + +function tsParseIntersectionTypeOrHigher() { + eat(tt.bitwiseAND); + tsParseTypeOperatorOrHigher(); + if (match(tt.bitwiseAND)) { + while (eat(tt.bitwiseAND)) { + tsParseTypeOperatorOrHigher(); + } + } +} + +function tsParseUnionTypeOrHigher() { + eat(tt.bitwiseOR); + tsParseIntersectionTypeOrHigher(); + if (match(tt.bitwiseOR)) { + while (eat(tt.bitwiseOR)) { + tsParseIntersectionTypeOrHigher(); + } + } +} + +function tsIsStartOfFunctionType() { + if (match(tt.lessThan)) { + return true; + } + return match(tt.parenL) && tsLookaheadIsUnambiguouslyStartOfFunctionType(); +} + +function tsSkipParameterStart() { + if (match(tt.name) || match(tt._this)) { + next(); + return true; + } + // If this is a possible array/object destructure, walk to the matching bracket/brace. + // The next token after will tell us definitively whether this is a function param. + if (match(tt.braceL) || match(tt.bracketL)) { + let depth = 1; + next(); + while (depth > 0 && !state.error) { + if (match(tt.braceL) || match(tt.bracketL)) { + depth++; + } else if (match(tt.braceR) || match(tt.bracketR)) { + depth--; + } + next(); + } + return true; + } + return false; +} + +function tsLookaheadIsUnambiguouslyStartOfFunctionType() { + const snapshot = state.snapshot(); + const isUnambiguouslyStartOfFunctionType = tsIsUnambiguouslyStartOfFunctionType(); + state.restoreFromSnapshot(snapshot); + return isUnambiguouslyStartOfFunctionType; +} + +function tsIsUnambiguouslyStartOfFunctionType() { + next(); + if (match(tt.parenR) || match(tt.ellipsis)) { + // ( ) + // ( ... + return true; + } + if (tsSkipParameterStart()) { + if (match(tt.colon) || match(tt.comma) || match(tt.question) || match(tt.eq)) { + // ( xxx : + // ( xxx , + // ( xxx ? 
+ // ( xxx = + return true; + } + if (match(tt.parenR)) { + next(); + if (match(tt.arrow)) { + // ( xxx ) => + return true; + } + } + } + return false; +} + +function tsParseTypeOrTypePredicateAnnotation(returnToken) { + const oldIsType = pushTypeContext(0); + expect(returnToken); + tsParseTypePredicatePrefix(); + // Regardless of whether we found an "is" token, there's now just a regular type in front of + // us. + tsParseType(); + popTypeContext(oldIsType); +} + +function tsTryParseTypeOrTypePredicateAnnotation() { + if (match(tt.colon)) { + tsParseTypeOrTypePredicateAnnotation(tt.colon); + } +} + +export function tsTryParseTypeAnnotation() { + if (match(tt.colon)) { + tsParseTypeAnnotation(); + } +} + +function tsTryParseType() { + if (eat(tt.colon)) { + tsParseType(); + } +} + +function tsParseTypePredicatePrefix() { + const snapshot = state.snapshot(); + parseIdentifier(); + if (isContextual(ContextualKeyword._is) && !hasPrecedingLineBreak()) { + next(); + } else { + state.restoreFromSnapshot(snapshot); + } +} + +export function tsParseTypeAnnotation() { + const oldIsType = pushTypeContext(0); + expect(tt.colon); + tsParseType(); + popTypeContext(oldIsType); +} + +export function tsParseType() { + tsParseNonConditionalType(); + if (hasPrecedingLineBreak() || !eat(tt._extends)) { + return; + } + // extends type + tsParseNonConditionalType(); + expect(tt.question); + // true type + tsParseType(); + expect(tt.colon); + // false type + tsParseType(); +} + +export function tsParseNonConditionalType() { + if (tsIsStartOfFunctionType()) { + tsParseFunctionOrConstructorType(FunctionType.TSFunctionType); + return; + } + if (match(tt._new)) { + // As in `new () => Date` + tsParseFunctionOrConstructorType(FunctionType.TSConstructorType); + return; + } + tsParseUnionTypeOrHigher(); +} + +export function tsParseTypeAssertion() { + const oldIsType = pushTypeContext(1); + tsParseType(); + expect(tt.greaterThan); + popTypeContext(oldIsType); + parseMaybeUnary(); +} + +export function tsTryParseJSXTypeArgument() { + if (eat(tt.jsxTagStart)) { + state.tokens[state.tokens.length - 1].type = tt.typeParameterStart; + const oldIsType = pushTypeContext(1); + while (!match(tt.greaterThan) && !state.error) { + tsParseType(); + eat(tt.comma); + } + // Process >, but the one after needs to be parsed JSX-style. + nextJSXTagToken(); + popTypeContext(oldIsType); + } +} + +function tsParseHeritageClause() { + while (!match(tt.braceL) && !state.error) { + tsParseExpressionWithTypeArguments(); + eat(tt.comma); + } +} + +function tsParseExpressionWithTypeArguments() { + // Note: TS uses parseLeftHandSideExpressionOrHigher, + // then has grammar errors later if it's not an EntityName. + tsParseEntityName(); + if (match(tt.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseInterfaceDeclaration() { + parseBindingIdentifier(false); + tsTryParseTypeParameters(); + if (eat(tt._extends)) { + tsParseHeritageClause(); + } + tsParseObjectTypeMembers(); +} + +function tsParseTypeAliasDeclaration() { + parseBindingIdentifier(false); + tsTryParseTypeParameters(); + expect(tt.eq); + tsParseType(); + semicolon(); +} + +function tsParseEnumMember() { + // Computed property names are grammar errors in an enum, so accept just string literal or identifier. 
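// Assumed examples (not from the sucrase source):
//   enum Status { Ok, "not-found" = 404 }   // identifier and string-literal names are accepted
//   enum Broken { ["computed"]: 1 }         // a computed name is a grammar error and is not handled here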
+ if (match(tt.string)) { + parseLiteral(); + } else { + parseIdentifier(); + } + if (eat(tt.eq)) { + const eqIndex = state.tokens.length - 1; + parseMaybeAssign(); + state.tokens[eqIndex].rhsEndIndex = state.tokens.length; + } +} + +function tsParseEnumDeclaration() { + parseBindingIdentifier(false); + expect(tt.braceL); + while (!eat(tt.braceR) && !state.error) { + tsParseEnumMember(); + eat(tt.comma); + } +} + +function tsParseModuleBlock() { + expect(tt.braceL); + parseBlockBody(/* end */ tt.braceR); +} + +function tsParseModuleOrNamespaceDeclaration() { + parseBindingIdentifier(false); + if (eat(tt.dot)) { + tsParseModuleOrNamespaceDeclaration(); + } else { + tsParseModuleBlock(); + } +} + +function tsParseAmbientExternalModuleDeclaration() { + if (isContextual(ContextualKeyword._global)) { + parseIdentifier(); + } else if (match(tt.string)) { + parseExprAtom(); + } else { + unexpected(); + } + + if (match(tt.braceL)) { + tsParseModuleBlock(); + } else { + semicolon(); + } +} + +export function tsParseImportEqualsDeclaration() { + parseImportedIdentifier(); + expect(tt.eq); + tsParseModuleReference(); + semicolon(); +} + +function tsIsExternalModuleReference() { + return isContextual(ContextualKeyword._require) && lookaheadType() === tt.parenL; +} + +function tsParseModuleReference() { + if (tsIsExternalModuleReference()) { + tsParseExternalModuleReference(); + } else { + tsParseEntityName(); + } +} + +function tsParseExternalModuleReference() { + expectContextual(ContextualKeyword._require); + expect(tt.parenL); + if (!match(tt.string)) { + unexpected(); + } + parseLiteral(); + expect(tt.parenR); +} + +// Utilities + +// Returns true if a statement matched. +function tsTryParseDeclare() { + if (isLineTerminator()) { + return false; + } + switch (state.type) { + case tt._function: { + const oldIsType = pushTypeContext(1); + next(); + // We don't need to precisely get the function start here, since it's only used to mark + // the function as a type if it's bodiless, and it's already a type here. + const functionStart = state.start; + parseFunction(functionStart, /* isStatement */ true); + popTypeContext(oldIsType); + return true; + } + case tt._class: { + const oldIsType = pushTypeContext(1); + parseClass(/* isStatement */ true, /* optionalId */ false); + popTypeContext(oldIsType); + return true; + } + case tt._const: { + if (match(tt._const) && isLookaheadContextual(ContextualKeyword._enum)) { + const oldIsType = pushTypeContext(1); + // `const enum = 0;` not allowed because "enum" is a strict mode reserved word. + expect(tt._const); + expectContextual(ContextualKeyword._enum); + state.tokens[state.tokens.length - 1].type = tt._enum; + tsParseEnumDeclaration(); + popTypeContext(oldIsType); + return true; + } + } + // falls through + case tt._var: + case tt._let: { + const oldIsType = pushTypeContext(1); + parseVarStatement(state.type); + popTypeContext(oldIsType); + return true; + } + case tt.name: { + const oldIsType = pushTypeContext(1); + const contextualKeyword = state.contextualKeyword; + let matched = false; + if (contextualKeyword === ContextualKeyword._global) { + tsParseAmbientExternalModuleDeclaration(); + matched = true; + } else { + matched = tsParseDeclaration(contextualKeyword, /* isBeforeToken */ true); + } + popTypeContext(oldIsType); + return matched; + } + default: + return false; + } +} + +// Note: this won't be called unless the keyword is allowed in `shouldParseExportDeclaration`. +// Returns true if it matched a declaration. 
+function tsTryParseExportDeclaration() { + return tsParseDeclaration(state.contextualKeyword, /* isBeforeToken */ true); +} + +// Returns true if it matched a statement. +function tsParseExpressionStatement(contextualKeyword) { + switch (contextualKeyword) { + case ContextualKeyword._declare: { + const declareTokenIndex = state.tokens.length - 1; + const matched = tsTryParseDeclare(); + if (matched) { + state.tokens[declareTokenIndex].type = tt._declare; + return true; + } + break; + } + case ContextualKeyword._global: + // `global { }` (with no `declare`) may appear inside an ambient module declaration. + // Would like to use tsParseAmbientExternalModuleDeclaration here, but already ran past "global". + if (match(tt.braceL)) { + tsParseModuleBlock(); + return true; + } + break; + + default: + return tsParseDeclaration(contextualKeyword, /* isBeforeToken */ false); + } + return false; +} + +// Common to tsTryParseDeclare, tsTryParseExportDeclaration, and tsParseExpressionStatement. +// Returns true if it matched a declaration. +function tsParseDeclaration(contextualKeyword, isBeforeToken) { + switch (contextualKeyword) { + case ContextualKeyword._abstract: + if (tsCheckLineTerminatorAndMatch(tt._class, isBeforeToken)) { + if (isBeforeToken) next(); + state.tokens[state.tokens.length - 1].type = tt._abstract; + parseClass(/* isStatement */ true, /* optionalId */ false); + return true; + } + break; + + case ContextualKeyword._enum: + if (tsCheckLineTerminatorAndMatch(tt.name, isBeforeToken)) { + if (isBeforeToken) next(); + state.tokens[state.tokens.length - 1].type = tt._enum; + tsParseEnumDeclaration(); + return true; + } + break; + + case ContextualKeyword._interface: + if (tsCheckLineTerminatorAndMatch(tt.name, isBeforeToken)) { + // `next` is true in "export" and "declare" contexts, so we want to remove that token + // as well. + const oldIsType = pushTypeContext(1); + if (isBeforeToken) next(); + tsParseInterfaceDeclaration(); + popTypeContext(oldIsType); + return true; + } + break; + + case ContextualKeyword._module: + if (isBeforeToken) next(); + if (match(tt.string)) { + const oldIsType = pushTypeContext(isBeforeToken ? 2 : 1); + tsParseAmbientExternalModuleDeclaration(); + popTypeContext(oldIsType); + return true; + } else if (tsCheckLineTerminatorAndMatch(tt.name, isBeforeToken)) { + const oldIsType = pushTypeContext(isBeforeToken ? 2 : 1); + if (isBeforeToken) next(); + tsParseModuleOrNamespaceDeclaration(); + popTypeContext(oldIsType); + return true; + } + break; + + case ContextualKeyword._namespace: + if (tsCheckLineTerminatorAndMatch(tt.name, isBeforeToken)) { + const oldIsType = pushTypeContext(1); + if (isBeforeToken) next(); + tsParseModuleOrNamespaceDeclaration(); + popTypeContext(oldIsType); + return true; + } + break; + + case ContextualKeyword._type: + if (tsCheckLineTerminatorAndMatch(tt.name, isBeforeToken)) { + const oldIsType = pushTypeContext(1); + if (isBeforeToken) next(); + tsParseTypeAliasDeclaration(); + popTypeContext(oldIsType); + return true; + } + break; + + default: + break; + } + return false; +} + +function tsCheckLineTerminatorAndMatch(tokenType, isBeforeToken) { + return !isLineTerminator() && (isBeforeToken || match(tokenType)); +} + +// Returns true if there was a generic async arrow function. 
+function tsTryParseGenericAsyncArrowFunction() { + const snapshot = state.snapshot(); + + tsParseTypeParameters(); + parseFunctionParams(); + tsTryParseTypeOrTypePredicateAnnotation(); + expect(tt.arrow); + + if (state.error) { + state.restoreFromSnapshot(snapshot); + return false; + } + + parseFunctionBody(true); + return true; +} + +function tsParseTypeArguments() { + const oldIsType = pushTypeContext(0); + expect(tt.lessThan); + while (!eat(tt.greaterThan) && !state.error) { + tsParseType(); + eat(tt.comma); + } + popTypeContext(oldIsType); +} + +export function tsIsDeclarationStart() { + if (match(tt.name)) { + switch (state.contextualKeyword) { + case ContextualKeyword._abstract: + case ContextualKeyword._declare: + case ContextualKeyword._enum: + case ContextualKeyword._interface: + case ContextualKeyword._module: + case ContextualKeyword._namespace: + case ContextualKeyword._type: + return true; + default: + break; + } + } + + return false; +} + +// ====================================================== +// OVERRIDES +// ====================================================== + +export function tsParseFunctionBodyAndFinish(functionStart, funcContextId) { + // For arrow functions, `parseArrow` handles the return type itself. + if (match(tt.colon)) { + tsParseTypeOrTypePredicateAnnotation(tt.colon); + } + + // The original code checked the node type to make sure this function type allows a missing + // body, but we skip that to avoid sending around the node type. We instead just use the + // allowExpressionBody boolean to make sure it's not an arrow function. + if (!match(tt.braceL) && isLineTerminator()) { + // Retroactively mark the function declaration as a type. + let i = state.tokens.length - 1; + while ( + i >= 0 && + (state.tokens[i].start >= functionStart || + state.tokens[i].type === tt._default || + state.tokens[i].type === tt._export) + ) { + state.tokens[i].isType = true; + i--; + } + return; + } + + parseFunctionBody(false, funcContextId); +} + +export function tsParseSubscript(startPos, noCalls, stopState) { + if (!hasPrecedingLineBreak() && eat(tt.bang)) { + state.tokens[state.tokens.length - 1].type = tt.nonNullAssertion; + return; + } + + if (match(tt.lessThan)) { + // There are number of things we are going to "maybe" parse, like type arguments on + // tagged template expressions. If any of them fail, walk it back and continue. + const snapshot = state.snapshot(); + + if (!noCalls && atPossibleAsync()) { + // Almost certainly this is a generic async function `async () => ... + // But it might be a call with a type argument `async();` + const asyncArrowFn = tsTryParseGenericAsyncArrowFunction(); + if (asyncArrowFn) { + return; + } + } + tsParseTypeArguments(); + if (!noCalls && eat(tt.parenL)) { + parseCallExpressionArguments(); + } else if (match(tt.backQuote)) { + // Tagged template with a type argument. + parseTemplate(); + } else { + unexpected(); + } + + if (state.error) { + state.restoreFromSnapshot(snapshot); + } else { + return; + } + } + baseParseSubscript(startPos, noCalls, stopState); +} + +export function tsStartParseNewArguments() { + if (match(tt.lessThan)) { + // 99% certain this is `new C();`. But may be `new C < T;`, which is also legal. 
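// The two readings being distinguished (assumed examples, not from the sucrase source):
//   new Box<string>();    // `<` starts type arguments for the constructor call
//   new Box < threshold;  // `<` is a comparison operator applied to `new Box`
// The snapshot below allows the type-argument attempt to be undone if it fails.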
+ const snapshot = state.snapshot(); + + state.type = tt.typeParameterStart; + tsParseTypeArguments(); + if (!match(tt.parenL)) { + unexpected(); + } + + if (state.error) { + state.restoreFromSnapshot(snapshot); + } + } +} + +export function tsTryParseExport() { + if (match(tt._import)) { + // `export import A = B;` + expect(tt._import); + tsParseImportEqualsDeclaration(); + return true; + } else if (eat(tt.eq)) { + // `export = x;` + parseExpression(); + semicolon(); + return true; + } else if (eatContextual(ContextualKeyword._as)) { + // `export as namespace A;` + // See `parseNamespaceExportDeclaration` in TypeScript's own parser + expectContextual(ContextualKeyword._namespace); + parseIdentifier(); + semicolon(); + return true; + } else { + return false; + } +} + +export function tsTryParseExportDefaultExpression() { + if (isContextual(ContextualKeyword._abstract) && lookaheadType() === tt._class) { + state.type = tt._abstract; + next(); // Skip "abstract" + parseClass(true, true); + return true; + } + if (isContextual(ContextualKeyword._interface)) { + // Make sure "export default" are considered type tokens so the whole thing is removed. + const oldIsType = pushTypeContext(2); + tsParseDeclaration(ContextualKeyword._interface, true); + popTypeContext(oldIsType); + return true; + } + return false; +} + +export function tsTryParseStatementContent() { + if (state.type === tt._const) { + const ahead = lookaheadTypeAndKeyword(); + if (ahead.type === tt.name && ahead.contextualKeyword === ContextualKeyword._enum) { + expect(tt._const); + expectContextual(ContextualKeyword._enum); + state.tokens[state.tokens.length - 1].type = tt._enum; + tsParseEnumDeclaration(); + return true; + } + } + return false; +} + +export function tsParseAccessModifier() { + tsParseModifier([ + ContextualKeyword._public, + ContextualKeyword._protected, + ContextualKeyword._private, + ]); +} + +export function tsTryParseClassMemberWithIsStatic( + isStatic, + classContextId, +) { + let isAbstract = false; + let isReadonly = false; + + const mod = tsParseModifier([ContextualKeyword._abstract, ContextualKeyword._readonly]); + switch (mod) { + case ContextualKeyword._readonly: + isReadonly = true; + isAbstract = !!tsParseModifier([ContextualKeyword._abstract]); + break; + case ContextualKeyword._abstract: + isAbstract = true; + isReadonly = !!tsParseModifier([ContextualKeyword._readonly]); + break; + default: + break; + } + + // We no longer check for public/private/etc, but tsTryParseIndexSignature should just return + // false in that case for valid code. + if (!isAbstract && !isStatic) { + const found = tsTryParseIndexSignature(); + if (found) { + return true; + } + } + + if (isReadonly) { + // Must be a property (if not an index signature). + parseClassPropertyName(classContextId); + parsePostMemberNameModifiers(); + parseClassProperty(); + return true; + } + return false; +} + +// Note: The reason we do this in `parseIdentifierStatement` and not `parseStatement` +// is that e.g. `type()` is valid JS, so we must try parsing that first. +// If it's really a type, we will parse `type` as the statement, and can correct it here +// by parsing the rest. +export function tsParseIdentifierStatement(contextualKeyword) { + const matched = tsParseExpressionStatement(contextualKeyword); + if (!matched) { + semicolon(); + } +} + +export function tsParseExportDeclaration() { + // "export declare" is equivalent to just "export". 
+ const isDeclare = eatContextual(ContextualKeyword._declare); + if (isDeclare) { + state.tokens[state.tokens.length - 1].type = tt._declare; + } + + let matchedDeclaration = false; + if (match(tt.name)) { + if (isDeclare) { + const oldIsType = pushTypeContext(2); + matchedDeclaration = tsTryParseExportDeclaration(); + popTypeContext(oldIsType); + } else { + matchedDeclaration = tsTryParseExportDeclaration(); + } + } + if (!matchedDeclaration) { + if (isDeclare) { + const oldIsType = pushTypeContext(2); + parseStatement(true); + popTypeContext(oldIsType); + } else { + parseStatement(true); + } + } +} + +export function tsAfterParseClassSuper(hasSuper) { + if (hasSuper && match(tt.lessThan)) { + tsParseTypeArguments(); + } + if (eatContextual(ContextualKeyword._implements)) { + state.tokens[state.tokens.length - 1].type = tt._implements; + const oldIsType = pushTypeContext(1); + tsParseHeritageClause(); + popTypeContext(oldIsType); + } +} + +export function tsStartParseObjPropValue() { + tsTryParseTypeParameters(); +} + +export function tsStartParseFunctionParams() { + tsTryParseTypeParameters(); +} + +// `let x: number;` +export function tsAfterParseVarHead() { + const oldIsType = pushTypeContext(0); + eat(tt.bang); + tsTryParseTypeAnnotation(); + popTypeContext(oldIsType); +} + +// parse the return type of an async arrow function - let foo = (async (): number => {}); +export function tsStartParseAsyncArrowFromCallExpression() { + if (match(tt.colon)) { + tsParseTypeAnnotation(); + } +} + +// Returns true if the expression was an arrow function. +export function tsParseMaybeAssign(noIn, isWithinParens) { + // Note: When the JSX plugin is on, type assertions (`<T> x`) aren't valid syntax. + if (isJSXEnabled) { + return tsParseMaybeAssignWithJSX(noIn, isWithinParens); + } else { + return tsParseMaybeAssignWithoutJSX(noIn, isWithinParens); + } +} + +export function tsParseMaybeAssignWithJSX(noIn, isWithinParens) { + if (!match(tt.lessThan)) { + return baseParseMaybeAssign(noIn, isWithinParens); + } + + // Prefer to parse JSX if possible. But may be an arrow fn. + const snapshot = state.snapshot(); + let wasArrow = baseParseMaybeAssign(noIn, isWithinParens); + if (state.error) { + state.restoreFromSnapshot(snapshot); + } else { + return wasArrow; + } + + // Otherwise, try as type-parameterized arrow function. + state.type = tt.typeParameterStart; + // This is similar to TypeScript's `tryParseParenthesizedArrowFunctionExpression`. + tsParseTypeParameters(); + wasArrow = baseParseMaybeAssign(noIn, isWithinParens); + if (!wasArrow) { + unexpected(); + } + + return wasArrow; +} + +export function tsParseMaybeAssignWithoutJSX(noIn, isWithinParens) { + if (!match(tt.lessThan)) { + return baseParseMaybeAssign(noIn, isWithinParens); + } + + const snapshot = state.snapshot(); + // This is similar to TypeScript's `tryParseParenthesizedArrowFunctionExpression`. + tsParseTypeParameters(); + const wasArrow = baseParseMaybeAssign(noIn, isWithinParens); + if (!wasArrow) { + unexpected(); + } + if (state.error) { + state.restoreFromSnapshot(snapshot); + } else { + return wasArrow; + } + + // Try parsing a type cast instead of an arrow function. + // This will start with a type assertion (via parseMaybeUnary). + // But don't directly call `tsParseTypeAssertion` because we want to handle any binary after it. + return baseParseMaybeAssign(noIn, isWithinParens); +} + +export function tsParseArrow() { + if (match(tt.colon)) { + // This is different from how the TS parser does it. + // TS uses lookahead. 
Babylon parses it as a parenthesized expression and converts. + const snapshot = state.snapshot(); + + tsParseTypeOrTypePredicateAnnotation(tt.colon); + if (canInsertSemicolon()) unexpected(); + if (!match(tt.arrow)) unexpected(); + + if (state.error) { + state.restoreFromSnapshot(snapshot); + } + } + return eat(tt.arrow); +} + +// Allow type annotations inside of a parameter list. +export function tsParseAssignableListItemTypes() { + const oldIsType = pushTypeContext(0); + eat(tt.question); + tsTryParseTypeAnnotation(); + popTypeContext(oldIsType); +} + +export function tsParseMaybeDecoratorArguments() { + if (match(tt.lessThan)) { + tsParseTypeArguments(); + } + baseParseMaybeDecoratorArguments(); +} diff --git a/node_modules/sucrase/dist/parser/tokenizer/index.d.ts b/node_modules/sucrase/dist/parser/tokenizer/index.d.ts new file mode 100644 index 00000000..417d9ffd --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/index.d.ts @@ -0,0 +1,55 @@ +import { ContextualKeyword } from "./keywords"; +import { TokenType } from "./types"; +export declare enum IdentifierRole { + Access = 0, + ExportAccess = 1, + TopLevelDeclaration = 2, + FunctionScopedDeclaration = 3, + BlockScopedDeclaration = 4, + ObjectShorthandTopLevelDeclaration = 5, + ObjectShorthandFunctionScopedDeclaration = 6, + ObjectShorthandBlockScopedDeclaration = 7, + ObjectShorthand = 8, + ImportDeclaration = 9, + ObjectKey = 10, + ImportAccess = 11 +} +export declare function isDeclaration(token: Token): boolean; +export declare function isNonTopLevelDeclaration(token: Token): boolean; +export declare function isTopLevelDeclaration(token: Token): boolean; +export declare function isBlockScopedDeclaration(token: Token): boolean; +export declare function isFunctionScopedDeclaration(token: Token): boolean; +export declare function isObjectShorthandDeclaration(token: Token): boolean; +export declare class Token { + constructor(); + type: TokenType; + contextualKeyword: ContextualKeyword; + start: number; + end: number; + isType: boolean; + identifierRole: IdentifierRole | null; + shadowsGlobal: boolean; + contextId: number | null; + rhsEndIndex: number | null; + isExpression: boolean; +} +export declare function next(): void; +export declare function nextTemplateToken(): void; +export declare function retokenizeSlashAsRegex(): void; +export declare function pushTypeContext(existingTokensInType: number): boolean; +export declare function popTypeContext(oldIsType: boolean): void; +export declare function eat(type: TokenType): boolean; +export declare function match(type: TokenType): boolean; +export declare function lookaheadType(): TokenType; +export declare class TypeAndKeyword { + type: TokenType; + contextualKeyword: ContextualKeyword; + constructor(type: TokenType, contextualKeyword: ContextualKeyword); +} +export declare function lookaheadTypeAndKeyword(): TypeAndKeyword; +export declare function nextToken(): void; +export declare function skipLineComment(startSkip: number): void; +export declare function skipSpace(): void; +export declare function finishToken(type: TokenType, contextualKeyword?: ContextualKeyword): void; +export declare function getTokenFromCode(code: number): void; +export declare function skipWord(): void; diff --git a/node_modules/sucrase/dist/parser/tokenizer/index.js b/node_modules/sucrase/dist/parser/tokenizer/index.js new file mode 100644 index 00000000..890099ec --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/index.js @@ -0,0 +1,858 @@ +"use strict"; function 
_interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});/* eslint max-len: 0 */ + +var _base = require('../traverser/base'); +var _util = require('../traverser/util'); +var _charcodes = require('../util/charcodes'); +var _identifier = require('../util/identifier'); +var _whitespace = require('../util/whitespace'); +var _keywords = require('./keywords'); +var _readWord = require('./readWord'); var _readWord2 = _interopRequireDefault(_readWord); +var _types = require('./types'); + +var IdentifierRole; (function (IdentifierRole) { + const Access = 0; IdentifierRole[IdentifierRole["Access"] = Access] = "Access"; + const ExportAccess = Access + 1; IdentifierRole[IdentifierRole["ExportAccess"] = ExportAccess] = "ExportAccess"; + const TopLevelDeclaration = ExportAccess + 1; IdentifierRole[IdentifierRole["TopLevelDeclaration"] = TopLevelDeclaration] = "TopLevelDeclaration"; + const FunctionScopedDeclaration = TopLevelDeclaration + 1; IdentifierRole[IdentifierRole["FunctionScopedDeclaration"] = FunctionScopedDeclaration] = "FunctionScopedDeclaration"; + const BlockScopedDeclaration = FunctionScopedDeclaration + 1; IdentifierRole[IdentifierRole["BlockScopedDeclaration"] = BlockScopedDeclaration] = "BlockScopedDeclaration"; + const ObjectShorthandTopLevelDeclaration = BlockScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandTopLevelDeclaration"] = ObjectShorthandTopLevelDeclaration] = "ObjectShorthandTopLevelDeclaration"; + const ObjectShorthandFunctionScopedDeclaration = ObjectShorthandTopLevelDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandFunctionScopedDeclaration"] = ObjectShorthandFunctionScopedDeclaration] = "ObjectShorthandFunctionScopedDeclaration"; + const ObjectShorthandBlockScopedDeclaration = ObjectShorthandFunctionScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandBlockScopedDeclaration"] = ObjectShorthandBlockScopedDeclaration] = "ObjectShorthandBlockScopedDeclaration"; + const ObjectShorthand = ObjectShorthandBlockScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthand"] = ObjectShorthand] = "ObjectShorthand"; + // Any identifier bound in an import statement, e.g. both A and b from + // `import A, * as b from 'A';` + const ImportDeclaration = ObjectShorthand + 1; IdentifierRole[IdentifierRole["ImportDeclaration"] = ImportDeclaration] = "ImportDeclaration"; + const ObjectKey = ImportDeclaration + 1; IdentifierRole[IdentifierRole["ObjectKey"] = ObjectKey] = "ObjectKey"; + // The `foo` in `import {foo as bar} from "./abc";`. 
+ const ImportAccess = ObjectKey + 1; IdentifierRole[IdentifierRole["ImportAccess"] = ImportAccess] = "ImportAccess"; +})(IdentifierRole || (exports.IdentifierRole = IdentifierRole = {})); + + function isDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} exports.isDeclaration = isDeclaration; + + function isNonTopLevelDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} exports.isNonTopLevelDeclaration = isNonTopLevelDeclaration; + + function isTopLevelDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ImportDeclaration + ); +} exports.isTopLevelDeclaration = isTopLevelDeclaration; + + function isBlockScopedDeclaration(token) { + const role = token.identifierRole; + // Treat top-level declarations as block scope since the distinction doesn't matter here. + return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} exports.isBlockScopedDeclaration = isBlockScopedDeclaration; + + function isFunctionScopedDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration + ); +} exports.isFunctionScopedDeclaration = isFunctionScopedDeclaration; + + function isObjectShorthandDeclaration(token) { + return ( + token.identifierRole === IdentifierRole.ObjectShorthandTopLevelDeclaration || + token.identifierRole === IdentifierRole.ObjectShorthandBlockScopedDeclaration || + token.identifierRole === IdentifierRole.ObjectShorthandFunctionScopedDeclaration + ); +} exports.isObjectShorthandDeclaration = isObjectShorthandDeclaration; + +// Object type used to represent tokens. Note that normally, tokens +// simply exist as properties on the parser object. This is only +// used for the onToken callback and the external tokenizer. + class Token { + constructor() { + this.type = _base.state.type; + this.contextualKeyword = _base.state.contextualKeyword; + this.start = _base.state.start; + this.end = _base.state.end; + this.isType = _base.state.isType; + this.identifierRole = null; + this.shadowsGlobal = false; + this.contextId = null; + this.rhsEndIndex = null; + this.isExpression = false; + } + + + + + + + + // Initially false for all tokens, then may be computed in a follow-up step that does scope + // analysis. + + + // For assignments, the index of the RHS. For export tokens, the end of the export. + + // For class tokens, records if the class is a class expression or a class statement. 
+ +} exports.Token = Token; + +// ## Tokenizer + +// Move to the next token + function next() { + _base.state.tokens.push(new Token()); + nextToken(); +} exports.next = next; + +// Call instead of next when inside a template, since that needs to be handled differently. + function nextTemplateToken() { + _base.state.tokens.push(new Token()); + _base.state.start = _base.state.pos; + readTmplToken(); +} exports.nextTemplateToken = nextTemplateToken; + +// The tokenizer never parses regexes by default. Instead, the parser is responsible for +// instructing it to parse a regex when we see a slash at the start of an expression. + function retokenizeSlashAsRegex() { + if (_base.state.type === _types.TokenType.assign) { + --_base.state.pos; + } + readRegexp(); +} exports.retokenizeSlashAsRegex = retokenizeSlashAsRegex; + + function pushTypeContext(existingTokensInType) { + for (let i = _base.state.tokens.length - existingTokensInType; i < _base.state.tokens.length; i++) { + _base.state.tokens[i].isType = true; + } + const oldIsType = _base.state.isType; + _base.state.isType = true; + return oldIsType; +} exports.pushTypeContext = pushTypeContext; + + function popTypeContext(oldIsType) { + _base.state.isType = oldIsType; +} exports.popTypeContext = popTypeContext; + + function eat(type) { + if (match(type)) { + next(); + return true; + } else { + return false; + } +} exports.eat = eat; + + function match(type) { + return _base.state.type === type; +} exports.match = match; + + function lookaheadType() { + const snapshot = _base.state.snapshot(); + next(); + const type = _base.state.type; + _base.state.restoreFromSnapshot(snapshot); + return type; +} exports.lookaheadType = lookaheadType; + + class TypeAndKeyword { + + + constructor(type, contextualKeyword) { + this.type = type; + this.contextualKeyword = contextualKeyword; + } +} exports.TypeAndKeyword = TypeAndKeyword; + + function lookaheadTypeAndKeyword() { + const snapshot = _base.state.snapshot(); + next(); + const type = _base.state.type; + const contextualKeyword = _base.state.contextualKeyword; + _base.state.restoreFromSnapshot(snapshot); + return new TypeAndKeyword(type, contextualKeyword); +} exports.lookaheadTypeAndKeyword = lookaheadTypeAndKeyword; + +// Read a single token, updating the parser object's token-related +// properties. + function nextToken() { + skipSpace(); + _base.state.start = _base.state.pos; + if (_base.state.pos >= _base.input.length) { + const tokens = _base.state.tokens; + // We normally run past the end a bit, but if we're way past the end, avoid an infinite loop. + // Also check the token positions rather than the types since sometimes we rewrite the token + // type to something else. + if ( + tokens.length >= 2 && + tokens[tokens.length - 1].start >= _base.input.length && + tokens[tokens.length - 2].start >= _base.input.length + ) { + _util.unexpected.call(void 0, "Unexpectedly reached the end of input."); + } + finishToken(_types.TokenType.eof); + return; + } + readToken(_base.input.charCodeAt(_base.state.pos)); +} exports.nextToken = nextToken; + +function readToken(code) { + // Identifier or keyword. '\uXXXX' sequences are allowed in + // identifiers, so '\' also dispatches to that. 
+ if ( + _identifier.IS_IDENTIFIER_START[code] || + code === _charcodes.charCodes.backslash || + (code === _charcodes.charCodes.atSign && _base.input.charCodeAt(_base.state.pos + 1) === _charcodes.charCodes.atSign) + ) { + _readWord2.default.call(void 0, ); + } else { + getTokenFromCode(code); + } +} + +function skipBlockComment() { + while ( + _base.input.charCodeAt(_base.state.pos) !== _charcodes.charCodes.asterisk || + _base.input.charCodeAt(_base.state.pos + 1) !== _charcodes.charCodes.slash + ) { + _base.state.pos++; + if (_base.state.pos > _base.input.length) { + _util.unexpected.call(void 0, "Unterminated comment", _base.state.pos - 2); + return; + } + } + _base.state.pos += 2; +} + + function skipLineComment(startSkip) { + let ch = _base.input.charCodeAt((_base.state.pos += startSkip)); + if (_base.state.pos < _base.input.length) { + while ( + ch !== _charcodes.charCodes.lineFeed && + ch !== _charcodes.charCodes.carriageReturn && + ch !== _charcodes.charCodes.lineSeparator && + ch !== _charcodes.charCodes.paragraphSeparator && + ++_base.state.pos < _base.input.length + ) { + ch = _base.input.charCodeAt(_base.state.pos); + } + } +} exports.skipLineComment = skipLineComment; + +// Called at the start of the parse and after every token. Skips +// whitespace and comments. + function skipSpace() { + while (_base.state.pos < _base.input.length) { + const ch = _base.input.charCodeAt(_base.state.pos); + switch (ch) { + case _charcodes.charCodes.carriageReturn: + if (_base.input.charCodeAt(_base.state.pos + 1) === _charcodes.charCodes.lineFeed) { + ++_base.state.pos; + } + + case _charcodes.charCodes.lineFeed: + case _charcodes.charCodes.lineSeparator: + case _charcodes.charCodes.paragraphSeparator: + ++_base.state.pos; + break; + + case _charcodes.charCodes.slash: + switch (_base.input.charCodeAt(_base.state.pos + 1)) { + case _charcodes.charCodes.asterisk: + _base.state.pos += 2; + skipBlockComment(); + break; + + case _charcodes.charCodes.slash: + skipLineComment(2); + break; + + default: + return; + } + break; + + default: + if (_whitespace.IS_WHITESPACE[ch]) { + ++_base.state.pos; + } else { + return; + } + } + } +} exports.skipSpace = skipSpace; + +// Called at the end of every token. Sets various fields, and skips the space after the token, so +// that the next one's `start` will point at the right position. + function finishToken( + type, + contextualKeyword = _keywords.ContextualKeyword.NONE, +) { + _base.state.end = _base.state.pos; + _base.state.type = type; + _base.state.contextualKeyword = contextualKeyword; +} exports.finishToken = finishToken; + +// ### Token reading + +// This is the function that is called to fetch the next token. It +// is somewhat obscure, because it works in character codes rather +// than characters, and because operator parsing has been inlined +// into it. +// +// All in the name of speed. 
+function readToken_dot() { + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + if (nextChar >= _charcodes.charCodes.digit0 && nextChar <= _charcodes.charCodes.digit9) { + readNumber(true); + return; + } + + const next2 = _base.input.charCodeAt(_base.state.pos + 2); + if (nextChar === _charcodes.charCodes.dot && next2 === _charcodes.charCodes.dot) { + _base.state.pos += 3; + finishToken(_types.TokenType.ellipsis); + } else { + ++_base.state.pos; + finishToken(_types.TokenType.dot); + } +} + +function readToken_slash() { + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + if (nextChar === _charcodes.charCodes.equalsTo) { + finishOp(_types.TokenType.assign, 2); + } else { + finishOp(_types.TokenType.slash, 1); + } +} + +function readToken_mult_modulo(code) { + // '%*' + let tokenType = code === _charcodes.charCodes.asterisk ? _types.TokenType.star : _types.TokenType.modulo; + let width = 1; + let nextChar = _base.input.charCodeAt(_base.state.pos + 1); + + // Exponentiation operator ** + if (code === _charcodes.charCodes.asterisk && nextChar === _charcodes.charCodes.asterisk) { + width++; + nextChar = _base.input.charCodeAt(_base.state.pos + 2); + tokenType = _types.TokenType.exponent; + } + + // Match *= or %=, disallowing *=> which can be valid in flow. + if ( + nextChar === _charcodes.charCodes.equalsTo && + _base.input.charCodeAt(_base.state.pos + 2) !== _charcodes.charCodes.greaterThan + ) { + width++; + tokenType = _types.TokenType.assign; + } + + finishOp(tokenType, width); +} + +function readToken_pipe_amp(code) { + // '|&' + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + + if (nextChar === code) { + if (_base.input.charCodeAt(_base.state.pos + 2) === _charcodes.charCodes.equalsTo) { + // ||= or &&= + finishOp(_types.TokenType.assign, 3); + } else { + // || or && + finishOp(code === _charcodes.charCodes.verticalBar ? _types.TokenType.logicalOR : _types.TokenType.logicalAND, 2); + } + return; + } + + if (code === _charcodes.charCodes.verticalBar) { + // '|>' + if (nextChar === _charcodes.charCodes.greaterThan) { + finishOp(_types.TokenType.pipeline, 2); + return; + } else if (nextChar === _charcodes.charCodes.rightCurlyBrace && _base.isFlowEnabled) { + // '|}' + finishOp(_types.TokenType.braceBarR, 2); + return; + } + } + + if (nextChar === _charcodes.charCodes.equalsTo) { + finishOp(_types.TokenType.assign, 2); + return; + } + + finishOp(code === _charcodes.charCodes.verticalBar ? _types.TokenType.bitwiseOR : _types.TokenType.bitwiseAND, 1); +} + +function readToken_caret() { + // '^' + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + if (nextChar === _charcodes.charCodes.equalsTo) { + finishOp(_types.TokenType.assign, 2); + } else { + finishOp(_types.TokenType.bitwiseXOR, 1); + } +} + +function readToken_plus_min(code) { + // '+-' + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + + if (nextChar === code) { + // Tentatively call this a prefix operator, but it might be changed to postfix later. + finishOp(_types.TokenType.preIncDec, 2); + return; + } + + if (nextChar === _charcodes.charCodes.equalsTo) { + finishOp(_types.TokenType.assign, 2); + } else if (code === _charcodes.charCodes.plusSign) { + finishOp(_types.TokenType.plus, 1); + } else { + finishOp(_types.TokenType.minus, 1); + } +} + +// '<>' +function readToken_lt_gt(code) { + // Avoid right-shift for things like Array>. 
+ if (code === _charcodes.charCodes.greaterThan && _base.state.isType) { + finishOp(_types.TokenType.greaterThan, 1); + return; + } + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + + if (nextChar === code) { + const size = + code === _charcodes.charCodes.greaterThan && _base.input.charCodeAt(_base.state.pos + 2) === _charcodes.charCodes.greaterThan + ? 3 + : 2; + if (_base.input.charCodeAt(_base.state.pos + size) === _charcodes.charCodes.equalsTo) { + finishOp(_types.TokenType.assign, size + 1); + return; + } + finishOp(_types.TokenType.bitShift, size); + return; + } + + if (nextChar === _charcodes.charCodes.equalsTo) { + // <= | >= + finishOp(_types.TokenType.relationalOrEqual, 2); + } else if (code === _charcodes.charCodes.lessThan) { + finishOp(_types.TokenType.lessThan, 1); + } else { + finishOp(_types.TokenType.greaterThan, 1); + } +} + +function readToken_eq_excl(code) { + // '=!' + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + if (nextChar === _charcodes.charCodes.equalsTo) { + finishOp(_types.TokenType.equality, _base.input.charCodeAt(_base.state.pos + 2) === _charcodes.charCodes.equalsTo ? 3 : 2); + return; + } + if (code === _charcodes.charCodes.equalsTo && nextChar === _charcodes.charCodes.greaterThan) { + // '=>' + _base.state.pos += 2; + finishToken(_types.TokenType.arrow); + return; + } + finishOp(code === _charcodes.charCodes.equalsTo ? _types.TokenType.eq : _types.TokenType.bang, 1); +} + +function readToken_question() { + // '?' + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + const nextChar2 = _base.input.charCodeAt(_base.state.pos + 2); + if (nextChar === _charcodes.charCodes.questionMark && !_base.state.isType) { + if (nextChar2 === _charcodes.charCodes.equalsTo) { + // '??=' + finishOp(_types.TokenType.assign, 3); + } else { + // '??' + finishOp(_types.TokenType.nullishCoalescing, 2); + } + } else if ( + nextChar === _charcodes.charCodes.dot && + !(nextChar2 >= _charcodes.charCodes.digit0 && nextChar2 <= _charcodes.charCodes.digit9) + ) { + // '.' not followed by a number + _base.state.pos += 2; + finishToken(_types.TokenType.questionDot); + } else { + ++_base.state.pos; + finishToken(_types.TokenType.question); + } +} + + function getTokenFromCode(code) { + switch (code) { + case _charcodes.charCodes.numberSign: + ++_base.state.pos; + finishToken(_types.TokenType.hash); + return; + + // The interpretation of a dot depends on whether it is followed + // by a digit or another two dots. + + case _charcodes.charCodes.dot: + readToken_dot(); + return; + + // Punctuation tokens. 
+ case _charcodes.charCodes.leftParenthesis: + ++_base.state.pos; + finishToken(_types.TokenType.parenL); + return; + case _charcodes.charCodes.rightParenthesis: + ++_base.state.pos; + finishToken(_types.TokenType.parenR); + return; + case _charcodes.charCodes.semicolon: + ++_base.state.pos; + finishToken(_types.TokenType.semi); + return; + case _charcodes.charCodes.comma: + ++_base.state.pos; + finishToken(_types.TokenType.comma); + return; + case _charcodes.charCodes.leftSquareBracket: + ++_base.state.pos; + finishToken(_types.TokenType.bracketL); + return; + case _charcodes.charCodes.rightSquareBracket: + ++_base.state.pos; + finishToken(_types.TokenType.bracketR); + return; + + case _charcodes.charCodes.leftCurlyBrace: + if (_base.isFlowEnabled && _base.input.charCodeAt(_base.state.pos + 1) === _charcodes.charCodes.verticalBar) { + finishOp(_types.TokenType.braceBarL, 2); + } else { + ++_base.state.pos; + finishToken(_types.TokenType.braceL); + } + return; + + case _charcodes.charCodes.rightCurlyBrace: + ++_base.state.pos; + finishToken(_types.TokenType.braceR); + return; + + case _charcodes.charCodes.colon: + if (_base.input.charCodeAt(_base.state.pos + 1) === _charcodes.charCodes.colon) { + finishOp(_types.TokenType.doubleColon, 2); + } else { + ++_base.state.pos; + finishToken(_types.TokenType.colon); + } + return; + + case _charcodes.charCodes.questionMark: + readToken_question(); + return; + case _charcodes.charCodes.atSign: + ++_base.state.pos; + finishToken(_types.TokenType.at); + return; + + case _charcodes.charCodes.graveAccent: + ++_base.state.pos; + finishToken(_types.TokenType.backQuote); + return; + + case _charcodes.charCodes.digit0: { + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + // '0x', '0X', '0o', '0O', '0b', '0B' + if ( + nextChar === _charcodes.charCodes.lowercaseX || + nextChar === _charcodes.charCodes.uppercaseX || + nextChar === _charcodes.charCodes.lowercaseO || + nextChar === _charcodes.charCodes.uppercaseO || + nextChar === _charcodes.charCodes.lowercaseB || + nextChar === _charcodes.charCodes.uppercaseB + ) { + readRadixNumber(); + return; + } + } + // Anything else beginning with a digit is an integer, octal + // number, or float. + case _charcodes.charCodes.digit1: + case _charcodes.charCodes.digit2: + case _charcodes.charCodes.digit3: + case _charcodes.charCodes.digit4: + case _charcodes.charCodes.digit5: + case _charcodes.charCodes.digit6: + case _charcodes.charCodes.digit7: + case _charcodes.charCodes.digit8: + case _charcodes.charCodes.digit9: + readNumber(false); + return; + + // Quotes produce strings. + case _charcodes.charCodes.quotationMark: + case _charcodes.charCodes.apostrophe: + readString(code); + return; + + // Operators are parsed inline in tiny state machines. '=' (charCodes.equalsTo) is + // often referred to. `finishOp` simply skips the amount of + // characters it is given as second argument, and returns a token + // of the type given by its first argument. 
+ + case _charcodes.charCodes.slash: + readToken_slash(); + return; + + case _charcodes.charCodes.percentSign: + case _charcodes.charCodes.asterisk: + readToken_mult_modulo(code); + return; + + case _charcodes.charCodes.verticalBar: + case _charcodes.charCodes.ampersand: + readToken_pipe_amp(code); + return; + + case _charcodes.charCodes.caret: + readToken_caret(); + return; + + case _charcodes.charCodes.plusSign: + case _charcodes.charCodes.dash: + readToken_plus_min(code); + return; + + case _charcodes.charCodes.lessThan: + case _charcodes.charCodes.greaterThan: + readToken_lt_gt(code); + return; + + case _charcodes.charCodes.equalsTo: + case _charcodes.charCodes.exclamationMark: + readToken_eq_excl(code); + return; + + case _charcodes.charCodes.tilde: + finishOp(_types.TokenType.tilde, 1); + return; + + default: + break; + } + + _util.unexpected.call(void 0, `Unexpected character '${String.fromCharCode(code)}'`, _base.state.pos); +} exports.getTokenFromCode = getTokenFromCode; + +function finishOp(type, size) { + _base.state.pos += size; + finishToken(type); +} + +function readRegexp() { + const start = _base.state.pos; + let escaped = false; + let inClass = false; + for (;;) { + if (_base.state.pos >= _base.input.length) { + _util.unexpected.call(void 0, "Unterminated regular expression", start); + return; + } + const code = _base.input.charCodeAt(_base.state.pos); + if (escaped) { + escaped = false; + } else { + if (code === _charcodes.charCodes.leftSquareBracket) { + inClass = true; + } else if (code === _charcodes.charCodes.rightSquareBracket && inClass) { + inClass = false; + } else if (code === _charcodes.charCodes.slash && !inClass) { + break; + } + escaped = code === _charcodes.charCodes.backslash; + } + ++_base.state.pos; + } + ++_base.state.pos; + // Need to use `skipWord` because '\uXXXX' sequences are allowed here (don't ask). + skipWord(); + + finishToken(_types.TokenType.regexp); +} + +// Read an integer. We allow any valid digit, including hex digits, plus numeric separators, and +// stop at any other character. +function readInt() { + while (true) { + const code = _base.input.charCodeAt(_base.state.pos); + if ( + (code >= _charcodes.charCodes.digit0 && code <= _charcodes.charCodes.digit9) || + (code >= _charcodes.charCodes.lowercaseA && code <= _charcodes.charCodes.lowercaseF) || + (code >= _charcodes.charCodes.uppercaseA && code <= _charcodes.charCodes.uppercaseF) || + code === _charcodes.charCodes.underscore + ) { + _base.state.pos++; + } else { + break; + } + } +} + +function readRadixNumber() { + let isBigInt = false; + + _base.state.pos += 2; // 0x + readInt(); + + if (_base.input.charCodeAt(_base.state.pos) === _charcodes.charCodes.lowercaseN) { + ++_base.state.pos; + isBigInt = true; + } + + if (isBigInt) { + finishToken(_types.TokenType.bigint); + return; + } + + finishToken(_types.TokenType.num); +} + +// Read an integer, octal integer, or floating-point number. 
+function readNumber(startsWithDot) { + let isBigInt = false; + + if (!startsWithDot) { + readInt(); + } + + let nextChar = _base.input.charCodeAt(_base.state.pos); + if (nextChar === _charcodes.charCodes.dot) { + ++_base.state.pos; + readInt(); + nextChar = _base.input.charCodeAt(_base.state.pos); + } + + if (nextChar === _charcodes.charCodes.uppercaseE || nextChar === _charcodes.charCodes.lowercaseE) { + nextChar = _base.input.charCodeAt(++_base.state.pos); + if (nextChar === _charcodes.charCodes.plusSign || nextChar === _charcodes.charCodes.dash) { + ++_base.state.pos; + } + readInt(); + nextChar = _base.input.charCodeAt(_base.state.pos); + } + + if (nextChar === _charcodes.charCodes.lowercaseN) { + ++_base.state.pos; + isBigInt = true; + } + + if (isBigInt) { + finishToken(_types.TokenType.bigint); + return; + } + finishToken(_types.TokenType.num); +} + +function readString(quote) { + _base.state.pos++; + for (;;) { + if (_base.state.pos >= _base.input.length) { + _util.unexpected.call(void 0, "Unterminated string constant"); + return; + } + const ch = _base.input.charCodeAt(_base.state.pos); + if (ch === _charcodes.charCodes.backslash) { + _base.state.pos++; + } else if (ch === quote) { + break; + } + _base.state.pos++; + } + _base.state.pos++; + finishToken(_types.TokenType.string); +} + +// Reads template string tokens. +function readTmplToken() { + for (;;) { + if (_base.state.pos >= _base.input.length) { + _util.unexpected.call(void 0, "Unterminated template"); + return; + } + const ch = _base.input.charCodeAt(_base.state.pos); + if ( + ch === _charcodes.charCodes.graveAccent || + (ch === _charcodes.charCodes.dollarSign && _base.input.charCodeAt(_base.state.pos + 1) === _charcodes.charCodes.leftCurlyBrace) + ) { + if (_base.state.pos === _base.state.start && match(_types.TokenType.template)) { + if (ch === _charcodes.charCodes.dollarSign) { + _base.state.pos += 2; + finishToken(_types.TokenType.dollarBraceL); + return; + } else { + ++_base.state.pos; + finishToken(_types.TokenType.backQuote); + return; + } + } + finishToken(_types.TokenType.template); + return; + } + if (ch === _charcodes.charCodes.backslash) { + _base.state.pos++; + } + _base.state.pos++; + } +} + +// Skip to the end of the current word. Note that this is the same as the snippet at the end of +// readWord, but calling skipWord from readWord seems to slightly hurt performance from some rough +// measurements. 
+ function skipWord() { + while (_base.state.pos < _base.input.length) { + const ch = _base.input.charCodeAt(_base.state.pos); + if (_identifier.IS_IDENTIFIER_CHAR[ch]) { + _base.state.pos++; + } else if (ch === _charcodes.charCodes.backslash) { + // \u + _base.state.pos += 2; + if (_base.input.charCodeAt(_base.state.pos) === _charcodes.charCodes.leftCurlyBrace) { + while ( + _base.state.pos < _base.input.length && + _base.input.charCodeAt(_base.state.pos) !== _charcodes.charCodes.rightCurlyBrace + ) { + _base.state.pos++; + } + _base.state.pos++; + } + } else { + break; + } + } +} exports.skipWord = skipWord; diff --git a/node_modules/sucrase/dist/parser/tokenizer/index.mjs b/node_modules/sucrase/dist/parser/tokenizer/index.mjs new file mode 100644 index 00000000..0811ae3b --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/index.mjs @@ -0,0 +1,858 @@ +/* eslint max-len: 0 */ + +import {input, isFlowEnabled, state} from "../traverser/base"; +import {unexpected} from "../traverser/util"; +import {charCodes} from "../util/charcodes"; +import {IS_IDENTIFIER_CHAR, IS_IDENTIFIER_START} from "../util/identifier"; +import {IS_WHITESPACE} from "../util/whitespace"; +import {ContextualKeyword} from "./keywords"; +import readWord from "./readWord"; +import { TokenType as tt} from "./types"; + +export var IdentifierRole; (function (IdentifierRole) { + const Access = 0; IdentifierRole[IdentifierRole["Access"] = Access] = "Access"; + const ExportAccess = Access + 1; IdentifierRole[IdentifierRole["ExportAccess"] = ExportAccess] = "ExportAccess"; + const TopLevelDeclaration = ExportAccess + 1; IdentifierRole[IdentifierRole["TopLevelDeclaration"] = TopLevelDeclaration] = "TopLevelDeclaration"; + const FunctionScopedDeclaration = TopLevelDeclaration + 1; IdentifierRole[IdentifierRole["FunctionScopedDeclaration"] = FunctionScopedDeclaration] = "FunctionScopedDeclaration"; + const BlockScopedDeclaration = FunctionScopedDeclaration + 1; IdentifierRole[IdentifierRole["BlockScopedDeclaration"] = BlockScopedDeclaration] = "BlockScopedDeclaration"; + const ObjectShorthandTopLevelDeclaration = BlockScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandTopLevelDeclaration"] = ObjectShorthandTopLevelDeclaration] = "ObjectShorthandTopLevelDeclaration"; + const ObjectShorthandFunctionScopedDeclaration = ObjectShorthandTopLevelDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandFunctionScopedDeclaration"] = ObjectShorthandFunctionScopedDeclaration] = "ObjectShorthandFunctionScopedDeclaration"; + const ObjectShorthandBlockScopedDeclaration = ObjectShorthandFunctionScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandBlockScopedDeclaration"] = ObjectShorthandBlockScopedDeclaration] = "ObjectShorthandBlockScopedDeclaration"; + const ObjectShorthand = ObjectShorthandBlockScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthand"] = ObjectShorthand] = "ObjectShorthand"; + // Any identifier bound in an import statement, e.g. both A and b from + // `import A, * as b from 'A';` + const ImportDeclaration = ObjectShorthand + 1; IdentifierRole[IdentifierRole["ImportDeclaration"] = ImportDeclaration] = "ImportDeclaration"; + const ObjectKey = ImportDeclaration + 1; IdentifierRole[IdentifierRole["ObjectKey"] = ObjectKey] = "ObjectKey"; + // The `foo` in `import {foo as bar} from "./abc";`. 
+ const ImportAccess = ObjectKey + 1; IdentifierRole[IdentifierRole["ImportAccess"] = ImportAccess] = "ImportAccess"; +})(IdentifierRole || (IdentifierRole = {})); + +export function isDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} + +export function isNonTopLevelDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} + +export function isTopLevelDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ImportDeclaration + ); +} + +export function isBlockScopedDeclaration(token) { + const role = token.identifierRole; + // Treat top-level declarations as block scope since the distinction doesn't matter here. + return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} + +export function isFunctionScopedDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration + ); +} + +export function isObjectShorthandDeclaration(token) { + return ( + token.identifierRole === IdentifierRole.ObjectShorthandTopLevelDeclaration || + token.identifierRole === IdentifierRole.ObjectShorthandBlockScopedDeclaration || + token.identifierRole === IdentifierRole.ObjectShorthandFunctionScopedDeclaration + ); +} + +// Object type used to represent tokens. Note that normally, tokens +// simply exist as properties on the parser object. This is only +// used for the onToken callback and the external tokenizer. +export class Token { + constructor() { + this.type = state.type; + this.contextualKeyword = state.contextualKeyword; + this.start = state.start; + this.end = state.end; + this.isType = state.isType; + this.identifierRole = null; + this.shadowsGlobal = false; + this.contextId = null; + this.rhsEndIndex = null; + this.isExpression = false; + } + + + + + + + + // Initially false for all tokens, then may be computed in a follow-up step that does scope + // analysis. + + + // For assignments, the index of the RHS. For export tokens, the end of the export. + + // For class tokens, records if the class is a class expression or a class statement. + +} + +// ## Tokenizer + +// Move to the next token +export function next() { + state.tokens.push(new Token()); + nextToken(); +} + +// Call instead of next when inside a template, since that needs to be handled differently. +export function nextTemplateToken() { + state.tokens.push(new Token()); + state.start = state.pos; + readTmplToken(); +} + +// The tokenizer never parses regexes by default. 
Instead, the parser is responsible for +// instructing it to parse a regex when we see a slash at the start of an expression. +export function retokenizeSlashAsRegex() { + if (state.type === tt.assign) { + --state.pos; + } + readRegexp(); +} + +export function pushTypeContext(existingTokensInType) { + for (let i = state.tokens.length - existingTokensInType; i < state.tokens.length; i++) { + state.tokens[i].isType = true; + } + const oldIsType = state.isType; + state.isType = true; + return oldIsType; +} + +export function popTypeContext(oldIsType) { + state.isType = oldIsType; +} + +export function eat(type) { + if (match(type)) { + next(); + return true; + } else { + return false; + } +} + +export function match(type) { + return state.type === type; +} + +export function lookaheadType() { + const snapshot = state.snapshot(); + next(); + const type = state.type; + state.restoreFromSnapshot(snapshot); + return type; +} + +export class TypeAndKeyword { + + + constructor(type, contextualKeyword) { + this.type = type; + this.contextualKeyword = contextualKeyword; + } +} + +export function lookaheadTypeAndKeyword() { + const snapshot = state.snapshot(); + next(); + const type = state.type; + const contextualKeyword = state.contextualKeyword; + state.restoreFromSnapshot(snapshot); + return new TypeAndKeyword(type, contextualKeyword); +} + +// Read a single token, updating the parser object's token-related +// properties. +export function nextToken() { + skipSpace(); + state.start = state.pos; + if (state.pos >= input.length) { + const tokens = state.tokens; + // We normally run past the end a bit, but if we're way past the end, avoid an infinite loop. + // Also check the token positions rather than the types since sometimes we rewrite the token + // type to something else. + if ( + tokens.length >= 2 && + tokens[tokens.length - 1].start >= input.length && + tokens[tokens.length - 2].start >= input.length + ) { + unexpected("Unexpectedly reached the end of input."); + } + finishToken(tt.eof); + return; + } + readToken(input.charCodeAt(state.pos)); +} + +function readToken(code) { + // Identifier or keyword. '\uXXXX' sequences are allowed in + // identifiers, so '\' also dispatches to that. + if ( + IS_IDENTIFIER_START[code] || + code === charCodes.backslash || + (code === charCodes.atSign && input.charCodeAt(state.pos + 1) === charCodes.atSign) + ) { + readWord(); + } else { + getTokenFromCode(code); + } +} + +function skipBlockComment() { + while ( + input.charCodeAt(state.pos) !== charCodes.asterisk || + input.charCodeAt(state.pos + 1) !== charCodes.slash + ) { + state.pos++; + if (state.pos > input.length) { + unexpected("Unterminated comment", state.pos - 2); + return; + } + } + state.pos += 2; +} + +export function skipLineComment(startSkip) { + let ch = input.charCodeAt((state.pos += startSkip)); + if (state.pos < input.length) { + while ( + ch !== charCodes.lineFeed && + ch !== charCodes.carriageReturn && + ch !== charCodes.lineSeparator && + ch !== charCodes.paragraphSeparator && + ++state.pos < input.length + ) { + ch = input.charCodeAt(state.pos); + } + } +} + +// Called at the start of the parse and after every token. Skips +// whitespace and comments. 
+export function skipSpace() { + while (state.pos < input.length) { + const ch = input.charCodeAt(state.pos); + switch (ch) { + case charCodes.carriageReturn: + if (input.charCodeAt(state.pos + 1) === charCodes.lineFeed) { + ++state.pos; + } + + case charCodes.lineFeed: + case charCodes.lineSeparator: + case charCodes.paragraphSeparator: + ++state.pos; + break; + + case charCodes.slash: + switch (input.charCodeAt(state.pos + 1)) { + case charCodes.asterisk: + state.pos += 2; + skipBlockComment(); + break; + + case charCodes.slash: + skipLineComment(2); + break; + + default: + return; + } + break; + + default: + if (IS_WHITESPACE[ch]) { + ++state.pos; + } else { + return; + } + } + } +} + +// Called at the end of every token. Sets various fields, and skips the space after the token, so +// that the next one's `start` will point at the right position. +export function finishToken( + type, + contextualKeyword = ContextualKeyword.NONE, +) { + state.end = state.pos; + state.type = type; + state.contextualKeyword = contextualKeyword; +} + +// ### Token reading + +// This is the function that is called to fetch the next token. It +// is somewhat obscure, because it works in character codes rather +// than characters, and because operator parsing has been inlined +// into it. +// +// All in the name of speed. +function readToken_dot() { + const nextChar = input.charCodeAt(state.pos + 1); + if (nextChar >= charCodes.digit0 && nextChar <= charCodes.digit9) { + readNumber(true); + return; + } + + const next2 = input.charCodeAt(state.pos + 2); + if (nextChar === charCodes.dot && next2 === charCodes.dot) { + state.pos += 3; + finishToken(tt.ellipsis); + } else { + ++state.pos; + finishToken(tt.dot); + } +} + +function readToken_slash() { + const nextChar = input.charCodeAt(state.pos + 1); + if (nextChar === charCodes.equalsTo) { + finishOp(tt.assign, 2); + } else { + finishOp(tt.slash, 1); + } +} + +function readToken_mult_modulo(code) { + // '%*' + let tokenType = code === charCodes.asterisk ? tt.star : tt.modulo; + let width = 1; + let nextChar = input.charCodeAt(state.pos + 1); + + // Exponentiation operator ** + if (code === charCodes.asterisk && nextChar === charCodes.asterisk) { + width++; + nextChar = input.charCodeAt(state.pos + 2); + tokenType = tt.exponent; + } + + // Match *= or %=, disallowing *=> which can be valid in flow. + if ( + nextChar === charCodes.equalsTo && + input.charCodeAt(state.pos + 2) !== charCodes.greaterThan + ) { + width++; + tokenType = tt.assign; + } + + finishOp(tokenType, width); +} + +function readToken_pipe_amp(code) { + // '|&' + const nextChar = input.charCodeAt(state.pos + 1); + + if (nextChar === code) { + if (input.charCodeAt(state.pos + 2) === charCodes.equalsTo) { + // ||= or &&= + finishOp(tt.assign, 3); + } else { + // || or && + finishOp(code === charCodes.verticalBar ? tt.logicalOR : tt.logicalAND, 2); + } + return; + } + + if (code === charCodes.verticalBar) { + // '|>' + if (nextChar === charCodes.greaterThan) { + finishOp(tt.pipeline, 2); + return; + } else if (nextChar === charCodes.rightCurlyBrace && isFlowEnabled) { + // '|}' + finishOp(tt.braceBarR, 2); + return; + } + } + + if (nextChar === charCodes.equalsTo) { + finishOp(tt.assign, 2); + return; + } + + finishOp(code === charCodes.verticalBar ? 
tt.bitwiseOR : tt.bitwiseAND, 1); +} + +function readToken_caret() { + // '^' + const nextChar = input.charCodeAt(state.pos + 1); + if (nextChar === charCodes.equalsTo) { + finishOp(tt.assign, 2); + } else { + finishOp(tt.bitwiseXOR, 1); + } +} + +function readToken_plus_min(code) { + // '+-' + const nextChar = input.charCodeAt(state.pos + 1); + + if (nextChar === code) { + // Tentatively call this a prefix operator, but it might be changed to postfix later. + finishOp(tt.preIncDec, 2); + return; + } + + if (nextChar === charCodes.equalsTo) { + finishOp(tt.assign, 2); + } else if (code === charCodes.plusSign) { + finishOp(tt.plus, 1); + } else { + finishOp(tt.minus, 1); + } +} + +// '<>' +function readToken_lt_gt(code) { + // Avoid right-shift for things like Array>. + if (code === charCodes.greaterThan && state.isType) { + finishOp(tt.greaterThan, 1); + return; + } + const nextChar = input.charCodeAt(state.pos + 1); + + if (nextChar === code) { + const size = + code === charCodes.greaterThan && input.charCodeAt(state.pos + 2) === charCodes.greaterThan + ? 3 + : 2; + if (input.charCodeAt(state.pos + size) === charCodes.equalsTo) { + finishOp(tt.assign, size + 1); + return; + } + finishOp(tt.bitShift, size); + return; + } + + if (nextChar === charCodes.equalsTo) { + // <= | >= + finishOp(tt.relationalOrEqual, 2); + } else if (code === charCodes.lessThan) { + finishOp(tt.lessThan, 1); + } else { + finishOp(tt.greaterThan, 1); + } +} + +function readToken_eq_excl(code) { + // '=!' + const nextChar = input.charCodeAt(state.pos + 1); + if (nextChar === charCodes.equalsTo) { + finishOp(tt.equality, input.charCodeAt(state.pos + 2) === charCodes.equalsTo ? 3 : 2); + return; + } + if (code === charCodes.equalsTo && nextChar === charCodes.greaterThan) { + // '=>' + state.pos += 2; + finishToken(tt.arrow); + return; + } + finishOp(code === charCodes.equalsTo ? tt.eq : tt.bang, 1); +} + +function readToken_question() { + // '?' + const nextChar = input.charCodeAt(state.pos + 1); + const nextChar2 = input.charCodeAt(state.pos + 2); + if (nextChar === charCodes.questionMark && !state.isType) { + if (nextChar2 === charCodes.equalsTo) { + // '??=' + finishOp(tt.assign, 3); + } else { + // '??' + finishOp(tt.nullishCoalescing, 2); + } + } else if ( + nextChar === charCodes.dot && + !(nextChar2 >= charCodes.digit0 && nextChar2 <= charCodes.digit9) + ) { + // '.' not followed by a number + state.pos += 2; + finishToken(tt.questionDot); + } else { + ++state.pos; + finishToken(tt.question); + } +} + +export function getTokenFromCode(code) { + switch (code) { + case charCodes.numberSign: + ++state.pos; + finishToken(tt.hash); + return; + + // The interpretation of a dot depends on whether it is followed + // by a digit or another two dots. + + case charCodes.dot: + readToken_dot(); + return; + + // Punctuation tokens. 
+ case charCodes.leftParenthesis: + ++state.pos; + finishToken(tt.parenL); + return; + case charCodes.rightParenthesis: + ++state.pos; + finishToken(tt.parenR); + return; + case charCodes.semicolon: + ++state.pos; + finishToken(tt.semi); + return; + case charCodes.comma: + ++state.pos; + finishToken(tt.comma); + return; + case charCodes.leftSquareBracket: + ++state.pos; + finishToken(tt.bracketL); + return; + case charCodes.rightSquareBracket: + ++state.pos; + finishToken(tt.bracketR); + return; + + case charCodes.leftCurlyBrace: + if (isFlowEnabled && input.charCodeAt(state.pos + 1) === charCodes.verticalBar) { + finishOp(tt.braceBarL, 2); + } else { + ++state.pos; + finishToken(tt.braceL); + } + return; + + case charCodes.rightCurlyBrace: + ++state.pos; + finishToken(tt.braceR); + return; + + case charCodes.colon: + if (input.charCodeAt(state.pos + 1) === charCodes.colon) { + finishOp(tt.doubleColon, 2); + } else { + ++state.pos; + finishToken(tt.colon); + } + return; + + case charCodes.questionMark: + readToken_question(); + return; + case charCodes.atSign: + ++state.pos; + finishToken(tt.at); + return; + + case charCodes.graveAccent: + ++state.pos; + finishToken(tt.backQuote); + return; + + case charCodes.digit0: { + const nextChar = input.charCodeAt(state.pos + 1); + // '0x', '0X', '0o', '0O', '0b', '0B' + if ( + nextChar === charCodes.lowercaseX || + nextChar === charCodes.uppercaseX || + nextChar === charCodes.lowercaseO || + nextChar === charCodes.uppercaseO || + nextChar === charCodes.lowercaseB || + nextChar === charCodes.uppercaseB + ) { + readRadixNumber(); + return; + } + } + // Anything else beginning with a digit is an integer, octal + // number, or float. + case charCodes.digit1: + case charCodes.digit2: + case charCodes.digit3: + case charCodes.digit4: + case charCodes.digit5: + case charCodes.digit6: + case charCodes.digit7: + case charCodes.digit8: + case charCodes.digit9: + readNumber(false); + return; + + // Quotes produce strings. + case charCodes.quotationMark: + case charCodes.apostrophe: + readString(code); + return; + + // Operators are parsed inline in tiny state machines. '=' (charCodes.equalsTo) is + // often referred to. `finishOp` simply skips the amount of + // characters it is given as second argument, and returns a token + // of the type given by its first argument. 
+ + case charCodes.slash: + readToken_slash(); + return; + + case charCodes.percentSign: + case charCodes.asterisk: + readToken_mult_modulo(code); + return; + + case charCodes.verticalBar: + case charCodes.ampersand: + readToken_pipe_amp(code); + return; + + case charCodes.caret: + readToken_caret(); + return; + + case charCodes.plusSign: + case charCodes.dash: + readToken_plus_min(code); + return; + + case charCodes.lessThan: + case charCodes.greaterThan: + readToken_lt_gt(code); + return; + + case charCodes.equalsTo: + case charCodes.exclamationMark: + readToken_eq_excl(code); + return; + + case charCodes.tilde: + finishOp(tt.tilde, 1); + return; + + default: + break; + } + + unexpected(`Unexpected character '${String.fromCharCode(code)}'`, state.pos); +} + +function finishOp(type, size) { + state.pos += size; + finishToken(type); +} + +function readRegexp() { + const start = state.pos; + let escaped = false; + let inClass = false; + for (;;) { + if (state.pos >= input.length) { + unexpected("Unterminated regular expression", start); + return; + } + const code = input.charCodeAt(state.pos); + if (escaped) { + escaped = false; + } else { + if (code === charCodes.leftSquareBracket) { + inClass = true; + } else if (code === charCodes.rightSquareBracket && inClass) { + inClass = false; + } else if (code === charCodes.slash && !inClass) { + break; + } + escaped = code === charCodes.backslash; + } + ++state.pos; + } + ++state.pos; + // Need to use `skipWord` because '\uXXXX' sequences are allowed here (don't ask). + skipWord(); + + finishToken(tt.regexp); +} + +// Read an integer. We allow any valid digit, including hex digits, plus numeric separators, and +// stop at any other character. +function readInt() { + while (true) { + const code = input.charCodeAt(state.pos); + if ( + (code >= charCodes.digit0 && code <= charCodes.digit9) || + (code >= charCodes.lowercaseA && code <= charCodes.lowercaseF) || + (code >= charCodes.uppercaseA && code <= charCodes.uppercaseF) || + code === charCodes.underscore + ) { + state.pos++; + } else { + break; + } + } +} + +function readRadixNumber() { + let isBigInt = false; + + state.pos += 2; // 0x + readInt(); + + if (input.charCodeAt(state.pos) === charCodes.lowercaseN) { + ++state.pos; + isBigInt = true; + } + + if (isBigInt) { + finishToken(tt.bigint); + return; + } + + finishToken(tt.num); +} + +// Read an integer, octal integer, or floating-point number. +function readNumber(startsWithDot) { + let isBigInt = false; + + if (!startsWithDot) { + readInt(); + } + + let nextChar = input.charCodeAt(state.pos); + if (nextChar === charCodes.dot) { + ++state.pos; + readInt(); + nextChar = input.charCodeAt(state.pos); + } + + if (nextChar === charCodes.uppercaseE || nextChar === charCodes.lowercaseE) { + nextChar = input.charCodeAt(++state.pos); + if (nextChar === charCodes.plusSign || nextChar === charCodes.dash) { + ++state.pos; + } + readInt(); + nextChar = input.charCodeAt(state.pos); + } + + if (nextChar === charCodes.lowercaseN) { + ++state.pos; + isBigInt = true; + } + + if (isBigInt) { + finishToken(tt.bigint); + return; + } + finishToken(tt.num); +} + +function readString(quote) { + state.pos++; + for (;;) { + if (state.pos >= input.length) { + unexpected("Unterminated string constant"); + return; + } + const ch = input.charCodeAt(state.pos); + if (ch === charCodes.backslash) { + state.pos++; + } else if (ch === quote) { + break; + } + state.pos++; + } + state.pos++; + finishToken(tt.string); +} + +// Reads template string tokens. 
+function readTmplToken() { + for (;;) { + if (state.pos >= input.length) { + unexpected("Unterminated template"); + return; + } + const ch = input.charCodeAt(state.pos); + if ( + ch === charCodes.graveAccent || + (ch === charCodes.dollarSign && input.charCodeAt(state.pos + 1) === charCodes.leftCurlyBrace) + ) { + if (state.pos === state.start && match(tt.template)) { + if (ch === charCodes.dollarSign) { + state.pos += 2; + finishToken(tt.dollarBraceL); + return; + } else { + ++state.pos; + finishToken(tt.backQuote); + return; + } + } + finishToken(tt.template); + return; + } + if (ch === charCodes.backslash) { + state.pos++; + } + state.pos++; + } +} + +// Skip to the end of the current word. Note that this is the same as the snippet at the end of +// readWord, but calling skipWord from readWord seems to slightly hurt performance from some rough +// measurements. +export function skipWord() { + while (state.pos < input.length) { + const ch = input.charCodeAt(state.pos); + if (IS_IDENTIFIER_CHAR[ch]) { + state.pos++; + } else if (ch === charCodes.backslash) { + // \u + state.pos += 2; + if (input.charCodeAt(state.pos) === charCodes.leftCurlyBrace) { + while ( + state.pos < input.length && + input.charCodeAt(state.pos) !== charCodes.rightCurlyBrace + ) { + state.pos++; + } + state.pos++; + } + } else { + break; + } + } +} diff --git a/node_modules/sucrase/dist/parser/tokenizer/keywords.d.ts b/node_modules/sucrase/dist/parser/tokenizer/keywords.d.ts new file mode 100644 index 00000000..15d2e57b --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/keywords.d.ts @@ -0,0 +1,35 @@ +export declare enum ContextualKeyword { + NONE = 0, + _abstract = 1, + _as = 2, + _async = 3, + _await = 4, + _checks = 5, + _constructor = 6, + _declare = 7, + _enum = 8, + _exports = 9, + _from = 10, + _get = 11, + _global = 12, + _implements = 13, + _infer = 14, + _interface = 15, + _is = 16, + _keyof = 17, + _mixins = 18, + _module = 19, + _namespace = 20, + _of = 21, + _opaque = 22, + _private = 23, + _protected = 24, + _proto = 25, + _public = 26, + _readonly = 27, + _require = 28, + _set = 29, + _static = 30, + _type = 31, + _unique = 32 +} diff --git a/node_modules/sucrase/dist/parser/tokenizer/keywords.js b/node_modules/sucrase/dist/parser/tokenizer/keywords.js new file mode 100644 index 00000000..c51dfe86 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/keywords.js @@ -0,0 +1,35 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var ContextualKeyword; (function (ContextualKeyword) { + const NONE = 0; ContextualKeyword[ContextualKeyword["NONE"] = NONE] = "NONE"; + const _abstract = NONE + 1; ContextualKeyword[ContextualKeyword["_abstract"] = _abstract] = "_abstract"; + const _as = _abstract + 1; ContextualKeyword[ContextualKeyword["_as"] = _as] = "_as"; + const _async = _as + 1; ContextualKeyword[ContextualKeyword["_async"] = _async] = "_async"; + const _await = _async + 1; ContextualKeyword[ContextualKeyword["_await"] = _await] = "_await"; + const _checks = _await + 1; ContextualKeyword[ContextualKeyword["_checks"] = _checks] = "_checks"; + const _constructor = _checks + 1; ContextualKeyword[ContextualKeyword["_constructor"] = _constructor] = "_constructor"; + const _declare = _constructor + 1; ContextualKeyword[ContextualKeyword["_declare"] = _declare] = "_declare"; + const _enum = _declare + 1; ContextualKeyword[ContextualKeyword["_enum"] = _enum] = "_enum"; + const _exports = _enum + 1; ContextualKeyword[ContextualKeyword["_exports"] = _exports] = 
"_exports"; + const _from = _exports + 1; ContextualKeyword[ContextualKeyword["_from"] = _from] = "_from"; + const _get = _from + 1; ContextualKeyword[ContextualKeyword["_get"] = _get] = "_get"; + const _global = _get + 1; ContextualKeyword[ContextualKeyword["_global"] = _global] = "_global"; + const _implements = _global + 1; ContextualKeyword[ContextualKeyword["_implements"] = _implements] = "_implements"; + const _infer = _implements + 1; ContextualKeyword[ContextualKeyword["_infer"] = _infer] = "_infer"; + const _interface = _infer + 1; ContextualKeyword[ContextualKeyword["_interface"] = _interface] = "_interface"; + const _is = _interface + 1; ContextualKeyword[ContextualKeyword["_is"] = _is] = "_is"; + const _keyof = _is + 1; ContextualKeyword[ContextualKeyword["_keyof"] = _keyof] = "_keyof"; + const _mixins = _keyof + 1; ContextualKeyword[ContextualKeyword["_mixins"] = _mixins] = "_mixins"; + const _module = _mixins + 1; ContextualKeyword[ContextualKeyword["_module"] = _module] = "_module"; + const _namespace = _module + 1; ContextualKeyword[ContextualKeyword["_namespace"] = _namespace] = "_namespace"; + const _of = _namespace + 1; ContextualKeyword[ContextualKeyword["_of"] = _of] = "_of"; + const _opaque = _of + 1; ContextualKeyword[ContextualKeyword["_opaque"] = _opaque] = "_opaque"; + const _private = _opaque + 1; ContextualKeyword[ContextualKeyword["_private"] = _private] = "_private"; + const _protected = _private + 1; ContextualKeyword[ContextualKeyword["_protected"] = _protected] = "_protected"; + const _proto = _protected + 1; ContextualKeyword[ContextualKeyword["_proto"] = _proto] = "_proto"; + const _public = _proto + 1; ContextualKeyword[ContextualKeyword["_public"] = _public] = "_public"; + const _readonly = _public + 1; ContextualKeyword[ContextualKeyword["_readonly"] = _readonly] = "_readonly"; + const _require = _readonly + 1; ContextualKeyword[ContextualKeyword["_require"] = _require] = "_require"; + const _set = _require + 1; ContextualKeyword[ContextualKeyword["_set"] = _set] = "_set"; + const _static = _set + 1; ContextualKeyword[ContextualKeyword["_static"] = _static] = "_static"; + const _type = _static + 1; ContextualKeyword[ContextualKeyword["_type"] = _type] = "_type"; + const _unique = _type + 1; ContextualKeyword[ContextualKeyword["_unique"] = _unique] = "_unique"; +})(ContextualKeyword || (exports.ContextualKeyword = ContextualKeyword = {})); diff --git a/node_modules/sucrase/dist/parser/tokenizer/keywords.mjs b/node_modules/sucrase/dist/parser/tokenizer/keywords.mjs new file mode 100644 index 00000000..fdd5daf9 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/keywords.mjs @@ -0,0 +1,35 @@ +export var ContextualKeyword; (function (ContextualKeyword) { + const NONE = 0; ContextualKeyword[ContextualKeyword["NONE"] = NONE] = "NONE"; + const _abstract = NONE + 1; ContextualKeyword[ContextualKeyword["_abstract"] = _abstract] = "_abstract"; + const _as = _abstract + 1; ContextualKeyword[ContextualKeyword["_as"] = _as] = "_as"; + const _async = _as + 1; ContextualKeyword[ContextualKeyword["_async"] = _async] = "_async"; + const _await = _async + 1; ContextualKeyword[ContextualKeyword["_await"] = _await] = "_await"; + const _checks = _await + 1; ContextualKeyword[ContextualKeyword["_checks"] = _checks] = "_checks"; + const _constructor = _checks + 1; ContextualKeyword[ContextualKeyword["_constructor"] = _constructor] = "_constructor"; + const _declare = _constructor + 1; ContextualKeyword[ContextualKeyword["_declare"] = _declare] = "_declare"; + 
const _enum = _declare + 1; ContextualKeyword[ContextualKeyword["_enum"] = _enum] = "_enum"; + const _exports = _enum + 1; ContextualKeyword[ContextualKeyword["_exports"] = _exports] = "_exports"; + const _from = _exports + 1; ContextualKeyword[ContextualKeyword["_from"] = _from] = "_from"; + const _get = _from + 1; ContextualKeyword[ContextualKeyword["_get"] = _get] = "_get"; + const _global = _get + 1; ContextualKeyword[ContextualKeyword["_global"] = _global] = "_global"; + const _implements = _global + 1; ContextualKeyword[ContextualKeyword["_implements"] = _implements] = "_implements"; + const _infer = _implements + 1; ContextualKeyword[ContextualKeyword["_infer"] = _infer] = "_infer"; + const _interface = _infer + 1; ContextualKeyword[ContextualKeyword["_interface"] = _interface] = "_interface"; + const _is = _interface + 1; ContextualKeyword[ContextualKeyword["_is"] = _is] = "_is"; + const _keyof = _is + 1; ContextualKeyword[ContextualKeyword["_keyof"] = _keyof] = "_keyof"; + const _mixins = _keyof + 1; ContextualKeyword[ContextualKeyword["_mixins"] = _mixins] = "_mixins"; + const _module = _mixins + 1; ContextualKeyword[ContextualKeyword["_module"] = _module] = "_module"; + const _namespace = _module + 1; ContextualKeyword[ContextualKeyword["_namespace"] = _namespace] = "_namespace"; + const _of = _namespace + 1; ContextualKeyword[ContextualKeyword["_of"] = _of] = "_of"; + const _opaque = _of + 1; ContextualKeyword[ContextualKeyword["_opaque"] = _opaque] = "_opaque"; + const _private = _opaque + 1; ContextualKeyword[ContextualKeyword["_private"] = _private] = "_private"; + const _protected = _private + 1; ContextualKeyword[ContextualKeyword["_protected"] = _protected] = "_protected"; + const _proto = _protected + 1; ContextualKeyword[ContextualKeyword["_proto"] = _proto] = "_proto"; + const _public = _proto + 1; ContextualKeyword[ContextualKeyword["_public"] = _public] = "_public"; + const _readonly = _public + 1; ContextualKeyword[ContextualKeyword["_readonly"] = _readonly] = "_readonly"; + const _require = _readonly + 1; ContextualKeyword[ContextualKeyword["_require"] = _require] = "_require"; + const _set = _require + 1; ContextualKeyword[ContextualKeyword["_set"] = _set] = "_set"; + const _static = _set + 1; ContextualKeyword[ContextualKeyword["_static"] = _static] = "_static"; + const _type = _static + 1; ContextualKeyword[ContextualKeyword["_type"] = _type] = "_type"; + const _unique = _type + 1; ContextualKeyword[ContextualKeyword["_unique"] = _unique] = "_unique"; +})(ContextualKeyword || (ContextualKeyword = {})); diff --git a/node_modules/sucrase/dist/parser/tokenizer/readWord.d.ts b/node_modules/sucrase/dist/parser/tokenizer/readWord.d.ts new file mode 100644 index 00000000..6fdfe909 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/readWord.d.ts @@ -0,0 +1,7 @@ +/** + * Read an identifier, producing either a name token or matching on one of the existing keywords. + * For performance, we pre-generate big decision tree that we traverse. Each node represents a + * prefix and has 27 values, where the first value is the token or contextual token, if any (-1 if + * not), and the other 26 values are the transitions to other nodes, or -1 to stop. 
+ */ +export default function readWord(): void; diff --git a/node_modules/sucrase/dist/parser/tokenizer/readWord.js b/node_modules/sucrase/dist/parser/tokenizer/readWord.js new file mode 100644 index 00000000..69ed5c93 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/readWord.js @@ -0,0 +1,64 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _base = require('../traverser/base'); +var _charcodes = require('../util/charcodes'); +var _identifier = require('../util/identifier'); +var _index = require('./index'); +var _readWordTree = require('./readWordTree'); +var _types = require('./types'); + +/** + * Read an identifier, producing either a name token or matching on one of the existing keywords. + * For performance, we pre-generate big decision tree that we traverse. Each node represents a + * prefix and has 27 values, where the first value is the token or contextual token, if any (-1 if + * not), and the other 26 values are the transitions to other nodes, or -1 to stop. + */ + function readWord() { + let treePos = 0; + let code = 0; + let pos = _base.state.pos; + while (pos < _base.input.length) { + code = _base.input.charCodeAt(pos); + if (code < _charcodes.charCodes.lowercaseA || code > _charcodes.charCodes.lowercaseZ) { + break; + } + const next = _readWordTree.READ_WORD_TREE[treePos + (code - _charcodes.charCodes.lowercaseA) + 1]; + if (next === -1) { + break; + } else { + treePos = next; + pos++; + } + } + + const keywordValue = _readWordTree.READ_WORD_TREE[treePos]; + if (keywordValue > -1 && !_identifier.IS_IDENTIFIER_CHAR[code]) { + _base.state.pos = pos; + if (keywordValue & 1) { + _index.finishToken.call(void 0, keywordValue >>> 1); + } else { + _index.finishToken.call(void 0, _types.TokenType.name, keywordValue >>> 1); + } + return; + } + + while (pos < _base.input.length) { + const ch = _base.input.charCodeAt(pos); + if (_identifier.IS_IDENTIFIER_CHAR[ch]) { + pos++; + } else if (ch === _charcodes.charCodes.backslash) { + // \u + pos += 2; + if (_base.input.charCodeAt(pos) === _charcodes.charCodes.leftCurlyBrace) { + while (pos < _base.input.length && _base.input.charCodeAt(pos) !== _charcodes.charCodes.rightCurlyBrace) { + pos++; + } + pos++; + } + } else if (ch === _charcodes.charCodes.atSign && _base.input.charCodeAt(pos + 1) === _charcodes.charCodes.atSign) { + pos += 2; + } else { + break; + } + } + _base.state.pos = pos; + _index.finishToken.call(void 0, _types.TokenType.name); +} exports.default = readWord; diff --git a/node_modules/sucrase/dist/parser/tokenizer/readWord.mjs b/node_modules/sucrase/dist/parser/tokenizer/readWord.mjs new file mode 100644 index 00000000..cf3df89f --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/readWord.mjs @@ -0,0 +1,64 @@ +import {input, state} from "../traverser/base"; +import {charCodes} from "../util/charcodes"; +import {IS_IDENTIFIER_CHAR} from "../util/identifier"; +import {finishToken} from "./index"; +import {READ_WORD_TREE} from "./readWordTree"; +import {TokenType as tt} from "./types"; + +/** + * Read an identifier, producing either a name token or matching on one of the existing keywords. + * For performance, we pre-generate big decision tree that we traverse. Each node represents a + * prefix and has 27 values, where the first value is the token or contextual token, if any (-1 if + * not), and the other 26 values are the transitions to other nodes, or -1 to stop. 
+ */ +export default function readWord() { + let treePos = 0; + let code = 0; + let pos = state.pos; + while (pos < input.length) { + code = input.charCodeAt(pos); + if (code < charCodes.lowercaseA || code > charCodes.lowercaseZ) { + break; + } + const next = READ_WORD_TREE[treePos + (code - charCodes.lowercaseA) + 1]; + if (next === -1) { + break; + } else { + treePos = next; + pos++; + } + } + + const keywordValue = READ_WORD_TREE[treePos]; + if (keywordValue > -1 && !IS_IDENTIFIER_CHAR[code]) { + state.pos = pos; + if (keywordValue & 1) { + finishToken(keywordValue >>> 1); + } else { + finishToken(tt.name, keywordValue >>> 1); + } + return; + } + + while (pos < input.length) { + const ch = input.charCodeAt(pos); + if (IS_IDENTIFIER_CHAR[ch]) { + pos++; + } else if (ch === charCodes.backslash) { + // \u + pos += 2; + if (input.charCodeAt(pos) === charCodes.leftCurlyBrace) { + while (pos < input.length && input.charCodeAt(pos) !== charCodes.rightCurlyBrace) { + pos++; + } + pos++; + } + } else if (ch === charCodes.atSign && input.charCodeAt(pos + 1) === charCodes.atSign) { + pos += 2; + } else { + break; + } + } + state.pos = pos; + finishToken(tt.name); +} diff --git a/node_modules/sucrase/dist/parser/tokenizer/readWordTree.d.ts b/node_modules/sucrase/dist/parser/tokenizer/readWordTree.d.ts new file mode 100644 index 00000000..f6fb9b24 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/readWordTree.d.ts @@ -0,0 +1 @@ +export declare const READ_WORD_TREE: Int32Array; diff --git a/node_modules/sucrase/dist/parser/tokenizer/readWordTree.js b/node_modules/sucrase/dist/parser/tokenizer/readWordTree.js new file mode 100644 index 00000000..95e87b50 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/readWordTree.js @@ -0,0 +1,595 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});// Generated file, do not edit! Run "yarn generate" to re-generate this file. 
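// A minimal sketch (not part of this generated file) of how readWord consumes the table below,
// assuming `tree` is READ_WORD_TREE and `word` contains only lowercase a-z. Each node occupies
// 27 slots: slot 0 holds the encoded match (-1 if none), and slots 1-26 hold the child node
// offsets for the letters 'a'..'z'. The low bit of a match distinguishes a real keyword token
// (bit set, TokenType in the upper bits) from a contextual keyword (bit clear).
function sketchLookupWord(tree, word) {
  let nodePos = 0;
  for (let i = 0; i < word.length; i++) {
    const letterIndex = word.charCodeAt(i) - 0x61; // 0x61 === 'a'
    const next = tree[nodePos + letterIndex + 1];
    if (next === -1) {
      return null; // not a keyword prefix; readWord then emits a plain name token
    }
    nodePos = next;
  }
  const value = tree[nodePos];
  if (value === -1) {
    return null;
  }
  return value & 1
    ? {kind: "token", type: value >>> 1} // e.g. "break" maps to TokenType._break
    : {kind: "contextualKeyword", keyword: value >>> 1}; // e.g. "abstract" maps to ContextualKeyword._abstract
}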
+var _keywords = require('./keywords'); +var _types = require('./types'); + +// prettier-ignore + const READ_WORD_TREE = new Int32Array([ + // "" + -1, 27, 459, 594, 1431, 2052, 2538, 3159, -1, 3375, -1, 4293, 4428, 4509, 4806, 5184, 5373, -1, 5913, 6372, 6831, 7263, 7425, 7587, -1, 7803, -1, + // "a" + -1, -1, 54, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 243, -1, -1, -1, 351, -1, -1, -1, + // "ab" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 81, -1, -1, -1, -1, -1, -1, -1, + // "abs" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 108, -1, -1, -1, -1, -1, -1, + // "abst" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 135, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstr" + -1, 162, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstra" + -1, -1, -1, 189, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstrac" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 216, -1, -1, -1, -1, -1, -1, + // "abstract" + _keywords.ContextualKeyword._abstract << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "as" + _keywords.ContextualKeyword._as << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 270, -1, + // "asy" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 297, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "asyn" + -1, -1, -1, 324, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "async" + _keywords.ContextualKeyword._async << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "aw" + -1, 378, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "awa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 405, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "awai" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 432, -1, -1, -1, -1, -1, -1, + // "await" + _keywords.ContextualKeyword._await << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "b" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 486, -1, -1, -1, -1, -1, -1, -1, -1, + // "br" + -1, -1, -1, -1, -1, 513, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "bre" + -1, 540, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "brea" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 567, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "break" + (_types.TokenType._break << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "c" + -1, 621, -1, -1, -1, -1, -1, -1, 783, -1, -1, -1, 918, -1, -1, 1026, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ca" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 648, 702, -1, -1, -1, -1, -1, -1, + // "cas" + -1, -1, -1, -1, -1, 675, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "case" + (_types.TokenType._case << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, + // "cat" + -1, -1, -1, 729, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "catc" + -1, -1, -1, -1, -1, -1, -1, -1, 756, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "catch" + (_types.TokenType._catch << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ch" + -1, -1, -1, -1, -1, 810, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "che" + -1, -1, -1, 837, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "chec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 864, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "check" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 891, -1, -1, -1, -1, -1, -1, -1, + // "checks" + _keywords.ContextualKeyword._checks << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cl" + -1, 945, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cla" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 972, -1, -1, -1, -1, -1, -1, -1, + // "clas" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 999, -1, -1, -1, -1, -1, -1, -1, + // "class" + (_types.TokenType._class << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "co" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1053, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "con" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1080, 1296, -1, -1, -1, -1, -1, -1, + // "cons" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1107, -1, -1, -1, -1, -1, -1, + // "const" + (_types.TokenType._const << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1134, -1, -1, -1, -1, -1, -1, -1, -1, + // "constr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1161, -1, -1, -1, -1, -1, + // "constru" + -1, -1, -1, 1188, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "construc" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1215, -1, -1, -1, -1, -1, -1, + // "construct" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1242, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "constructo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1269, -1, -1, -1, -1, -1, -1, -1, -1, + // "constructor" + _keywords.ContextualKeyword._constructor << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cont" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 1323, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "conti" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1350, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "contin" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1377, -1, -1, -1, -1, -1, + // "continu" + -1, -1, -1, -1, -1, 1404, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "continue" + (_types.TokenType._continue << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "d" + -1, -1, -1, -1, -1, 1458, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2025, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "de" + -1, -1, 1485, 1647, -1, -1, 1782, -1, -1, -1, -1, -1, 1917, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "deb" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1512, -1, -1, -1, -1, -1, + // "debu" + -1, -1, -1, -1, -1, -1, -1, 1539, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debug" + -1, -1, -1, -1, -1, -1, -1, 1566, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugg" + -1, -1, -1, -1, -1, 1593, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugge" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1620, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugger" + (_types.TokenType._debugger << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "dec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1674, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "decl" + -1, 1701, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "decla" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1728, -1, -1, -1, -1, -1, -1, -1, -1, + // "declar" + -1, -1, -1, -1, -1, 1755, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "declare" + _keywords.ContextualKeyword._declare << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "def" + -1, 1809, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "defa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1836, -1, -1, -1, -1, -1, + // "defau" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1863, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "defaul" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1890, -1, -1, -1, -1, -1, -1, + // "default" + (_types.TokenType._default << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "del" + -1, -1, -1, -1, -1, 1944, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "dele" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1971, -1, -1, -1, -1, -1, -1, + // "delet" + -1, -1, -1, -1, -1, 1998, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "delete" + (_types.TokenType._delete << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "do" + (_types.TokenType._do << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "e" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2079, -1, 2160, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2241, -1, -1, + // "el" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2106, -1, -1, -1, -1, -1, -1, -1, + // "els" + -1, -1, -1, -1, -1, 2133, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "else" + (_types.TokenType._else << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "en" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2187, -1, -1, -1, -1, -1, + // "enu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2214, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "enum" + _keywords.ContextualKeyword._enum << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ex" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2268, -1, -1, -1, 2403, -1, -1, -1, -1, -1, -1, + // "exp" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2295, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "expo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2322, -1, -1, -1, -1, -1, -1, -1, -1, + // "expor" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2349, -1, -1, -1, -1, -1, -1, + // "export" + (_types.TokenType._export << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2376, -1, -1, -1, -1, -1, -1, -1, + // "exports" + _keywords.ContextualKeyword._exports << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ext" + -1, -1, -1, -1, -1, 2430, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "exte" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2457, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "exten" + -1, -1, -1, -1, 2484, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "extend" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2511, -1, -1, -1, -1, -1, -1, -1, + // "extends" + (_types.TokenType._extends << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "f" + -1, 2565, -1, -1, -1, -1, -1, -1, -1, 2673, -1, -1, -1, -1, -1, 2835, -1, -1, 2889, -1, -1, 2970, -1, -1, -1, -1, -1, + // "fa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2592, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fal" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2619, -1, -1, -1, -1, -1, -1, -1, + // "fals" + -1, -1, -1, -1, -1, 2646, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "false" + (_types.TokenType._false << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2700, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fin" + -1, 2727, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fina" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2754, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "final" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2781, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "finall" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2808, -1, + // "finally" + (_types.TokenType._finally << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2862, -1, -1, -1, -1, -1, -1, -1, -1, + // "for" + (_types.TokenType._for << 1) + 1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2916, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2943, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "from" + _keywords.ContextualKeyword._from << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2997, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fun" + -1, -1, -1, 3024, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "func" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3051, -1, -1, -1, -1, -1, -1, + // "funct" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 3078, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "functi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3105, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "functio" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3132, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "function" + (_types.TokenType._function << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "g" + -1, -1, -1, -1, -1, 3186, -1, -1, -1, -1, -1, -1, 3240, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ge" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3213, -1, -1, -1, -1, -1, -1, + // "get" + _keywords.ContextualKeyword._get << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "gl" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3267, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "glo" + -1, -1, 3294, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "glob" + -1, 3321, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "globa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3348, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "global" + _keywords.ContextualKeyword._global << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "i" + -1, -1, -1, -1, -1, -1, 3402, -1, -1, -1, -1, -1, -1, 3429, 3753, -1, -1, -1, -1, 4266, -1, -1, -1, -1, -1, -1, -1, + // "if" + (_types.TokenType._if << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "im" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3456, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "imp" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3483, -1, -1, 3672, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impl" + -1, -1, -1, -1, -1, 3510, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "imple" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3537, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "implem" + -1, -1, -1, -1, -1, 3564, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impleme" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3591, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "implemen" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, 3618, -1, -1, -1, -1, -1, -1, + // "implement" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3645, -1, -1, -1, -1, -1, -1, -1, + // "implements" + _keywords.ContextualKeyword._implements << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3699, -1, -1, -1, -1, -1, -1, -1, -1, + // "impor" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3726, -1, -1, -1, -1, -1, -1, + // "import" + (_types.TokenType._import << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "in" + (_types.TokenType._in << 1) + 1, -1, -1, -1, -1, -1, 3780, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3861, 4077, -1, -1, -1, -1, -1, -1, + // "inf" + -1, -1, -1, -1, -1, 3807, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "infe" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3834, -1, -1, -1, -1, -1, -1, -1, -1, + // "infer" + _keywords.ContextualKeyword._infer << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ins" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3888, -1, -1, -1, -1, -1, -1, + // "inst" + -1, 3915, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "insta" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3942, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instan" + -1, -1, -1, 3969, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanc" + -1, -1, -1, -1, -1, 3996, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instance" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4023, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanceo" + -1, -1, -1, -1, -1, -1, 4050, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanceof" + (_types.TokenType._instanceof << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "int" + -1, -1, -1, -1, -1, 4104, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "inte" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4131, -1, -1, -1, -1, -1, -1, -1, -1, + // "inter" + -1, -1, -1, -1, -1, -1, 4158, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interf" + -1, 4185, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interfa" + -1, -1, -1, 4212, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interfac" + -1, -1, -1, -1, -1, 4239, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interface" + _keywords.ContextualKeyword._interface << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "is" + _keywords.ContextualKeyword._is << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "k" + -1, -1, -1, -1, -1, 4320, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + 
// "ke" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4347, -1, + // "key" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4374, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "keyo" + -1, -1, -1, -1, -1, -1, 4401, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "keyof" + _keywords.ContextualKeyword._keyof << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "l" + -1, -1, -1, -1, -1, 4455, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "le" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4482, -1, -1, -1, -1, -1, -1, + // "let" + (_types.TokenType._let << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "m" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 4536, -1, -1, -1, -1, -1, 4671, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4563, -1, -1, + // "mix" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 4590, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mixi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4617, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mixin" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4644, -1, -1, -1, -1, -1, -1, -1, + // "mixins" + _keywords.ContextualKeyword._mixins << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mo" + -1, -1, -1, -1, 4698, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mod" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4725, -1, -1, -1, -1, -1, + // "modu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4752, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "modul" + -1, -1, -1, -1, -1, 4779, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "module" + _keywords.ContextualKeyword._module << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "n" + -1, 4833, -1, -1, -1, 5049, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5103, -1, -1, -1, -1, -1, + // "na" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4860, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nam" + -1, -1, -1, -1, -1, 4887, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "name" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4914, -1, -1, -1, -1, -1, -1, -1, + // "names" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4941, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namesp" + -1, 4968, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespa" + -1, -1, -1, 4995, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespac" + -1, -1, -1, -1, -1, 5022, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespace" + _keywords.ContextualKeyword._namespace << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ne" + -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5076, -1, -1, -1, + // "new" + (_types.TokenType._new << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5130, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nul" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5157, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "null" + (_types.TokenType._null << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "o" + -1, -1, -1, -1, -1, -1, 5211, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5238, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "of" + _keywords.ContextualKeyword._of << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "op" + -1, 5265, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5292, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opaq" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5319, -1, -1, -1, -1, -1, + // "opaqu" + -1, -1, -1, -1, -1, 5346, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opaque" + _keywords.ContextualKeyword._opaque << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "p" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5400, -1, -1, 5778, -1, -1, -1, -1, -1, + // "pr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 5427, -1, -1, -1, -1, -1, 5562, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pri" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5454, -1, -1, -1, -1, + // "priv" + -1, 5481, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "priva" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5508, -1, -1, -1, -1, -1, -1, + // "privat" + -1, -1, -1, -1, -1, 5535, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "private" + _keywords.ContextualKeyword._private << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5589, -1, -1, -1, -1, -1, -1, + // "prot" + -1, -1, -1, -1, -1, 5616, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5751, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "prote" + -1, -1, -1, 5643, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5670, -1, -1, -1, -1, -1, -1, + // "protect" + -1, -1, -1, -1, -1, 5697, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protecte" + -1, -1, -1, -1, 5724, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protected" + _keywords.ContextualKeyword._protected << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "proto" + _keywords.ContextualKeyword._proto << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, + // "pu" + -1, -1, 5805, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pub" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5832, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "publ" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 5859, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "publi" + -1, -1, -1, 5886, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "public" + _keywords.ContextualKeyword._public << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "r" + -1, -1, -1, -1, -1, 5940, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "re" + -1, 5967, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6129, -1, -1, 6264, -1, -1, -1, -1, -1, -1, + // "rea" + -1, -1, -1, -1, 5994, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "read" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6021, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "reado" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6048, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "readon" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6075, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "readonl" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6102, -1, + // "readonly" + _keywords.ContextualKeyword._readonly << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "req" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6156, -1, -1, -1, -1, -1, + // "requ" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6183, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "requi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6210, -1, -1, -1, -1, -1, -1, -1, -1, + // "requir" + -1, -1, -1, -1, -1, 6237, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "require" + _keywords.ContextualKeyword._require << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ret" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6291, -1, -1, -1, -1, -1, + // "retu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6318, -1, -1, -1, -1, -1, -1, -1, -1, + // "retur" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6345, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "return" + (_types.TokenType._return << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "s" + -1, -1, -1, -1, -1, 6399, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6453, 6588, -1, 6696, -1, -1, -1, + // "se" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6426, -1, -1, -1, -1, -1, -1, + // "set" + _keywords.ContextualKeyword._set << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "st" + -1, 6480, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sta" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6507, -1, -1, -1, -1, -1, -1, + 
// "stat" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6534, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "stati" + -1, -1, -1, 6561, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "static" + _keywords.ContextualKeyword._static << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "su" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6615, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sup" + -1, -1, -1, -1, -1, 6642, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "supe" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6669, -1, -1, -1, -1, -1, -1, -1, -1, + // "super" + (_types.TokenType._super << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sw" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6723, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "swi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6750, -1, -1, -1, -1, -1, -1, + // "swit" + -1, -1, -1, 6777, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "switc" + -1, -1, -1, -1, -1, -1, -1, -1, 6804, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "switch" + (_types.TokenType._switch << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "t" + -1, -1, -1, -1, -1, -1, -1, -1, 6858, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7020, -1, -1, -1, -1, -1, -1, 7128, -1, + // "th" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6885, -1, -1, -1, -1, -1, -1, -1, -1, 6939, -1, -1, -1, -1, -1, -1, -1, -1, + // "thi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6912, -1, -1, -1, -1, -1, -1, -1, + // "this" + (_types.TokenType._this << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "thr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6966, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "thro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6993, -1, -1, -1, + // "throw" + (_types.TokenType._throw << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "tr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7047, -1, -1, -1, 7101, -1, + // "tru" + -1, -1, -1, -1, -1, 7074, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "true" + (_types.TokenType._true << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "try" + (_types.TokenType._try << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ty" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7155, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typ" + -1, -1, -1, -1, -1, 7182, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "type" + _keywords.ContextualKeyword._type << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7209, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typeo" + -1, -1, -1, -1, -1, -1, 7236, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typeof" + (_types.TokenType._typeof << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "u" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7290, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "un" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7317, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "uni" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7344, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "uniq" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7371, -1, -1, -1, -1, -1, + // "uniqu" + -1, -1, -1, -1, -1, 7398, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "unique" + _keywords.ContextualKeyword._unique << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "v" + -1, 7452, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7506, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "va" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7479, -1, -1, -1, -1, -1, -1, -1, -1, + // "var" + (_types.TokenType._var << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "vo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7533, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "voi" + -1, -1, -1, -1, 7560, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "void" + (_types.TokenType._void << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "w" + -1, -1, -1, -1, -1, -1, -1, -1, 7614, 7722, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "wh" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7641, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "whi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7668, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "whil" + -1, -1, -1, -1, -1, 7695, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "while" + (_types.TokenType._while << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "wi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7749, -1, -1, -1, -1, -1, -1, + // "wit" + -1, -1, -1, -1, -1, -1, -1, -1, 7776, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "with" + (_types.TokenType._with << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "y" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7830, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yi" + -1, -1, -1, -1, -1, 7857, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yie" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7884, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yiel" + -1, -1, -1, -1, 7911, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yield" + (_types.TokenType._yield << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, +]); exports.READ_WORD_TREE = READ_WORD_TREE; diff --git 
a/node_modules/sucrase/dist/parser/tokenizer/readWordTree.mjs b/node_modules/sucrase/dist/parser/tokenizer/readWordTree.mjs new file mode 100644 index 00000000..699c6290 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/readWordTree.mjs @@ -0,0 +1,595 @@ +// Generated file, do not edit! Run "yarn generate" to re-generate this file. +import {ContextualKeyword} from "./keywords"; +import {TokenType as tt} from "./types"; + +// prettier-ignore +export const READ_WORD_TREE = new Int32Array([ + // "" + -1, 27, 459, 594, 1431, 2052, 2538, 3159, -1, 3375, -1, 4293, 4428, 4509, 4806, 5184, 5373, -1, 5913, 6372, 6831, 7263, 7425, 7587, -1, 7803, -1, + // "a" + -1, -1, 54, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 243, -1, -1, -1, 351, -1, -1, -1, + // "ab" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 81, -1, -1, -1, -1, -1, -1, -1, + // "abs" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 108, -1, -1, -1, -1, -1, -1, + // "abst" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 135, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstr" + -1, 162, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstra" + -1, -1, -1, 189, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstrac" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 216, -1, -1, -1, -1, -1, -1, + // "abstract" + ContextualKeyword._abstract << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "as" + ContextualKeyword._as << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 270, -1, + // "asy" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 297, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "asyn" + -1, -1, -1, 324, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "async" + ContextualKeyword._async << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "aw" + -1, 378, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "awa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 405, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "awai" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 432, -1, -1, -1, -1, -1, -1, + // "await" + ContextualKeyword._await << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "b" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 486, -1, -1, -1, -1, -1, -1, -1, -1, + // "br" + -1, -1, -1, -1, -1, 513, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "bre" + -1, 540, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "brea" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 567, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "break" + (tt._break << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "c" + -1, 621, -1, -1, -1, -1, -1, -1, 783, -1, -1, -1, 918, -1, -1, 1026, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ca" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, 648, 702, -1, -1, -1, -1, -1, -1, + // "cas" + -1, -1, -1, -1, -1, 675, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "case" + (tt._case << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cat" + -1, -1, -1, 729, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "catc" + -1, -1, -1, -1, -1, -1, -1, -1, 756, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "catch" + (tt._catch << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ch" + -1, -1, -1, -1, -1, 810, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "che" + -1, -1, -1, 837, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "chec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 864, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "check" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 891, -1, -1, -1, -1, -1, -1, -1, + // "checks" + ContextualKeyword._checks << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cl" + -1, 945, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cla" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 972, -1, -1, -1, -1, -1, -1, -1, + // "clas" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 999, -1, -1, -1, -1, -1, -1, -1, + // "class" + (tt._class << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "co" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1053, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "con" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1080, 1296, -1, -1, -1, -1, -1, -1, + // "cons" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1107, -1, -1, -1, -1, -1, -1, + // "const" + (tt._const << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1134, -1, -1, -1, -1, -1, -1, -1, -1, + // "constr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1161, -1, -1, -1, -1, -1, + // "constru" + -1, -1, -1, 1188, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "construc" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1215, -1, -1, -1, -1, -1, -1, + // "construct" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1242, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "constructo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1269, -1, -1, -1, -1, -1, -1, -1, -1, + // "constructor" + ContextualKeyword._constructor << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cont" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 1323, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "conti" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1350, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "contin" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1377, -1, -1, -1, -1, -1, + // 
"continu" + -1, -1, -1, -1, -1, 1404, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "continue" + (tt._continue << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "d" + -1, -1, -1, -1, -1, 1458, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2025, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "de" + -1, -1, 1485, 1647, -1, -1, 1782, -1, -1, -1, -1, -1, 1917, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "deb" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1512, -1, -1, -1, -1, -1, + // "debu" + -1, -1, -1, -1, -1, -1, -1, 1539, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debug" + -1, -1, -1, -1, -1, -1, -1, 1566, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugg" + -1, -1, -1, -1, -1, 1593, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugge" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1620, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugger" + (tt._debugger << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "dec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1674, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "decl" + -1, 1701, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "decla" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1728, -1, -1, -1, -1, -1, -1, -1, -1, + // "declar" + -1, -1, -1, -1, -1, 1755, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "declare" + ContextualKeyword._declare << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "def" + -1, 1809, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "defa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1836, -1, -1, -1, -1, -1, + // "defau" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1863, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "defaul" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1890, -1, -1, -1, -1, -1, -1, + // "default" + (tt._default << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "del" + -1, -1, -1, -1, -1, 1944, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "dele" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1971, -1, -1, -1, -1, -1, -1, + // "delet" + -1, -1, -1, -1, -1, 1998, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "delete" + (tt._delete << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "do" + (tt._do << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "e" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2079, -1, 2160, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2241, -1, -1, + // "el" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2106, -1, -1, -1, -1, -1, -1, -1, + // "els" + -1, -1, -1, -1, -1, 2133, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "else" + (tt._else << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "en" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2187, -1, -1, -1, -1, -1, + // "enu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2214, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "enum" + ContextualKeyword._enum << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ex" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2268, -1, -1, -1, 2403, -1, -1, -1, -1, -1, -1, + // "exp" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2295, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "expo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2322, -1, -1, -1, -1, -1, -1, -1, -1, + // "expor" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2349, -1, -1, -1, -1, -1, -1, + // "export" + (tt._export << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2376, -1, -1, -1, -1, -1, -1, -1, + // "exports" + ContextualKeyword._exports << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ext" + -1, -1, -1, -1, -1, 2430, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "exte" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2457, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "exten" + -1, -1, -1, -1, 2484, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "extend" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2511, -1, -1, -1, -1, -1, -1, -1, + // "extends" + (tt._extends << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "f" + -1, 2565, -1, -1, -1, -1, -1, -1, -1, 2673, -1, -1, -1, -1, -1, 2835, -1, -1, 2889, -1, -1, 2970, -1, -1, -1, -1, -1, + // "fa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2592, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fal" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2619, -1, -1, -1, -1, -1, -1, -1, + // "fals" + -1, -1, -1, -1, -1, 2646, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "false" + (tt._false << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2700, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fin" + -1, 2727, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fina" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2754, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "final" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2781, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "finall" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2808, -1, + // "finally" + (tt._finally << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2862, -1, -1, -1, -1, -1, -1, -1, -1, + // 
"for" + (tt._for << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2916, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2943, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "from" + ContextualKeyword._from << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2997, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fun" + -1, -1, -1, 3024, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "func" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3051, -1, -1, -1, -1, -1, -1, + // "funct" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 3078, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "functi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3105, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "functio" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3132, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "function" + (tt._function << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "g" + -1, -1, -1, -1, -1, 3186, -1, -1, -1, -1, -1, -1, 3240, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ge" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3213, -1, -1, -1, -1, -1, -1, + // "get" + ContextualKeyword._get << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "gl" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3267, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "glo" + -1, -1, 3294, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "glob" + -1, 3321, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "globa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3348, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "global" + ContextualKeyword._global << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "i" + -1, -1, -1, -1, -1, -1, 3402, -1, -1, -1, -1, -1, -1, 3429, 3753, -1, -1, -1, -1, 4266, -1, -1, -1, -1, -1, -1, -1, + // "if" + (tt._if << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "im" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3456, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "imp" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3483, -1, -1, 3672, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impl" + -1, -1, -1, -1, -1, 3510, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "imple" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3537, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "implem" + -1, -1, -1, -1, -1, 3564, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impleme" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3591, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "implemen" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, 3618, -1, -1, -1, -1, -1, -1, + // "implement" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3645, -1, -1, -1, -1, -1, -1, -1, + // "implements" + ContextualKeyword._implements << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3699, -1, -1, -1, -1, -1, -1, -1, -1, + // "impor" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3726, -1, -1, -1, -1, -1, -1, + // "import" + (tt._import << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "in" + (tt._in << 1) + 1, -1, -1, -1, -1, -1, 3780, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3861, 4077, -1, -1, -1, -1, -1, -1, + // "inf" + -1, -1, -1, -1, -1, 3807, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "infe" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3834, -1, -1, -1, -1, -1, -1, -1, -1, + // "infer" + ContextualKeyword._infer << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ins" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3888, -1, -1, -1, -1, -1, -1, + // "inst" + -1, 3915, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "insta" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3942, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instan" + -1, -1, -1, 3969, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanc" + -1, -1, -1, -1, -1, 3996, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instance" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4023, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanceo" + -1, -1, -1, -1, -1, -1, 4050, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanceof" + (tt._instanceof << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "int" + -1, -1, -1, -1, -1, 4104, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "inte" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4131, -1, -1, -1, -1, -1, -1, -1, -1, + // "inter" + -1, -1, -1, -1, -1, -1, 4158, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interf" + -1, 4185, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interfa" + -1, -1, -1, 4212, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interfac" + -1, -1, -1, -1, -1, 4239, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interface" + ContextualKeyword._interface << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "is" + ContextualKeyword._is << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "k" + -1, -1, -1, -1, -1, 4320, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ke" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, 4347, -1, + // "key" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4374, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "keyo" + -1, -1, -1, -1, -1, -1, 4401, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "keyof" + ContextualKeyword._keyof << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "l" + -1, -1, -1, -1, -1, 4455, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "le" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4482, -1, -1, -1, -1, -1, -1, + // "let" + (tt._let << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "m" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 4536, -1, -1, -1, -1, -1, 4671, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4563, -1, -1, + // "mix" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 4590, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mixi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4617, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mixin" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4644, -1, -1, -1, -1, -1, -1, -1, + // "mixins" + ContextualKeyword._mixins << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mo" + -1, -1, -1, -1, 4698, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mod" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4725, -1, -1, -1, -1, -1, + // "modu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4752, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "modul" + -1, -1, -1, -1, -1, 4779, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "module" + ContextualKeyword._module << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "n" + -1, 4833, -1, -1, -1, 5049, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5103, -1, -1, -1, -1, -1, + // "na" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4860, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nam" + -1, -1, -1, -1, -1, 4887, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "name" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4914, -1, -1, -1, -1, -1, -1, -1, + // "names" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4941, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namesp" + -1, 4968, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespa" + -1, -1, -1, 4995, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespac" + -1, -1, -1, -1, -1, 5022, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespace" + ContextualKeyword._namespace << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ne" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5076, -1, -1, -1, + // "new" + (tt._new << 1) + 1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5130, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nul" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5157, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "null" + (tt._null << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "o" + -1, -1, -1, -1, -1, -1, 5211, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5238, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "of" + ContextualKeyword._of << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "op" + -1, 5265, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5292, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opaq" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5319, -1, -1, -1, -1, -1, + // "opaqu" + -1, -1, -1, -1, -1, 5346, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opaque" + ContextualKeyword._opaque << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "p" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5400, -1, -1, 5778, -1, -1, -1, -1, -1, + // "pr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 5427, -1, -1, -1, -1, -1, 5562, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pri" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5454, -1, -1, -1, -1, + // "priv" + -1, 5481, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "priva" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5508, -1, -1, -1, -1, -1, -1, + // "privat" + -1, -1, -1, -1, -1, 5535, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "private" + ContextualKeyword._private << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5589, -1, -1, -1, -1, -1, -1, + // "prot" + -1, -1, -1, -1, -1, 5616, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5751, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "prote" + -1, -1, -1, 5643, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5670, -1, -1, -1, -1, -1, -1, + // "protect" + -1, -1, -1, -1, -1, 5697, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protecte" + -1, -1, -1, -1, 5724, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protected" + ContextualKeyword._protected << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "proto" + ContextualKeyword._proto << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pu" + -1, -1, 5805, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pub" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5832, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "publ" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 5859, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "publi" + -1, -1, -1, 5886, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "public" + ContextualKeyword._public << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "r" + -1, -1, -1, -1, -1, 5940, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "re" + -1, 5967, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6129, -1, -1, 6264, -1, -1, -1, -1, -1, -1, + // "rea" + -1, -1, -1, -1, 5994, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "read" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6021, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "reado" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6048, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "readon" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6075, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "readonl" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6102, -1, + // "readonly" + ContextualKeyword._readonly << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "req" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6156, -1, -1, -1, -1, -1, + // "requ" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6183, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "requi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6210, -1, -1, -1, -1, -1, -1, -1, -1, + // "requir" + -1, -1, -1, -1, -1, 6237, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "require" + ContextualKeyword._require << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ret" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6291, -1, -1, -1, -1, -1, + // "retu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6318, -1, -1, -1, -1, -1, -1, -1, -1, + // "retur" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6345, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "return" + (tt._return << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "s" + -1, -1, -1, -1, -1, 6399, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6453, 6588, -1, 6696, -1, -1, -1, + // "se" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6426, -1, -1, -1, -1, -1, -1, + // "set" + ContextualKeyword._set << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "st" + -1, 6480, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sta" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6507, -1, -1, -1, -1, -1, -1, + // "stat" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6534, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "stati" + -1, -1, -1, 6561, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "static" + 
ContextualKeyword._static << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "su" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6615, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sup" + -1, -1, -1, -1, -1, 6642, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "supe" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6669, -1, -1, -1, -1, -1, -1, -1, -1, + // "super" + (tt._super << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sw" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6723, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "swi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6750, -1, -1, -1, -1, -1, -1, + // "swit" + -1, -1, -1, 6777, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "switc" + -1, -1, -1, -1, -1, -1, -1, -1, 6804, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "switch" + (tt._switch << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "t" + -1, -1, -1, -1, -1, -1, -1, -1, 6858, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7020, -1, -1, -1, -1, -1, -1, 7128, -1, + // "th" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6885, -1, -1, -1, -1, -1, -1, -1, -1, 6939, -1, -1, -1, -1, -1, -1, -1, -1, + // "thi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6912, -1, -1, -1, -1, -1, -1, -1, + // "this" + (tt._this << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "thr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6966, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "thro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6993, -1, -1, -1, + // "throw" + (tt._throw << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "tr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7047, -1, -1, -1, 7101, -1, + // "tru" + -1, -1, -1, -1, -1, 7074, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "true" + (tt._true << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "try" + (tt._try << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ty" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7155, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typ" + -1, -1, -1, -1, -1, 7182, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "type" + ContextualKeyword._type << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7209, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typeo" + -1, -1, -1, -1, -1, -1, 7236, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typeof" + (tt._typeof << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "u" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7290, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "un" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7317, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "uni" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7344, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "uniq" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7371, -1, -1, -1, -1, -1, + // "uniqu" + -1, -1, -1, -1, -1, 7398, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "unique" + ContextualKeyword._unique << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "v" + -1, 7452, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7506, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "va" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7479, -1, -1, -1, -1, -1, -1, -1, -1, + // "var" + (tt._var << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "vo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7533, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "voi" + -1, -1, -1, -1, 7560, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "void" + (tt._void << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "w" + -1, -1, -1, -1, -1, -1, -1, -1, 7614, 7722, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "wh" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7641, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "whi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7668, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "whil" + -1, -1, -1, -1, -1, 7695, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "while" + (tt._while << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "wi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7749, -1, -1, -1, -1, -1, -1, + // "wit" + -1, -1, -1, -1, -1, -1, -1, -1, 7776, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "with" + (tt._with << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "y" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7830, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yi" + -1, -1, -1, -1, -1, 7857, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yie" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7884, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yiel" + -1, -1, -1, -1, 7911, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yield" + (tt._yield << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, +]); diff --git a/node_modules/sucrase/dist/parser/tokenizer/state.d.ts b/node_modules/sucrase/dist/parser/tokenizer/state.d.ts new file mode 100644 index 00000000..756a45ae --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/state.d.ts @@ -0,0 +1,48 @@ +import { Token } from "./index"; +import { ContextualKeyword } from "./keywords"; +import { TokenType } from "./types"; +export declare class Scope { + startTokenIndex: number; + endTokenIndex: number; + isFunctionScope: boolean; + constructor(startTokenIndex: number, endTokenIndex: number, 
isFunctionScope: boolean); +} +export declare class StateSnapshot { + readonly potentialArrowAt: number; + readonly noAnonFunctionType: boolean; + readonly tokensLength: number; + readonly scopesLength: number; + readonly pos: number; + readonly type: TokenType; + readonly contextualKeyword: ContextualKeyword; + readonly start: number; + readonly end: number; + readonly isType: boolean; + readonly scopeDepth: number; + readonly error: Error | null; + constructor(potentialArrowAt: number, noAnonFunctionType: boolean, tokensLength: number, scopesLength: number, pos: number, type: TokenType, contextualKeyword: ContextualKeyword, start: number, end: number, isType: boolean, scopeDepth: number, error: Error | null); +} +export default class State { + potentialArrowAt: number; + noAnonFunctionType: boolean; + tokens: Array; + scopes: Array; + pos: number; + type: TokenType; + contextualKeyword: ContextualKeyword; + start: number; + end: number; + isType: boolean; + scopeDepth: number; + /** + * If the parser is in an error state, then the token is always tt.eof and all functions can + * keep executing but should be written so they don't get into an infinite loop in this situation. + * + * This approach, combined with the ability to snapshot and restore state, allows us to implement + * backtracking without exceptions and without needing to explicitly propagate error states + * everywhere. + */ + error: Error | null; + snapshot(): StateSnapshot; + restoreFromSnapshot(snapshot: StateSnapshot): void; +} diff --git a/node_modules/sucrase/dist/parser/tokenizer/state.js b/node_modules/sucrase/dist/parser/tokenizer/state.js new file mode 100644 index 00000000..2c8a3150 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/state.js @@ -0,0 +1,100 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); +var _keywords = require('./keywords'); +var _types = require('./types'); + + class Scope { + + + + + constructor(startTokenIndex, endTokenIndex, isFunctionScope) { + this.startTokenIndex = startTokenIndex; + this.endTokenIndex = endTokenIndex; + this.isFunctionScope = isFunctionScope; + } +} exports.Scope = Scope; + + class StateSnapshot { + constructor( + potentialArrowAt, + noAnonFunctionType, + tokensLength, + scopesLength, + pos, + type, + contextualKeyword, + start, + end, + isType, + scopeDepth, + error, + ) {;this.potentialArrowAt = potentialArrowAt;this.noAnonFunctionType = noAnonFunctionType;this.tokensLength = tokensLength;this.scopesLength = scopesLength;this.pos = pos;this.type = type;this.contextualKeyword = contextualKeyword;this.start = start;this.end = end;this.isType = isType;this.scopeDepth = scopeDepth;this.error = error;} +} exports.StateSnapshot = StateSnapshot; + + class State {constructor() { State.prototype.__init.call(this);State.prototype.__init2.call(this);State.prototype.__init3.call(this);State.prototype.__init4.call(this);State.prototype.__init5.call(this);State.prototype.__init6.call(this);State.prototype.__init7.call(this);State.prototype.__init8.call(this);State.prototype.__init9.call(this);State.prototype.__init10.call(this);State.prototype.__init11.call(this);State.prototype.__init12.call(this); } + // Used to signify the start of a potential arrow function + __init() {this.potentialArrowAt = -1} + + // Used by Flow to handle an edge case involving function type parsing. + __init2() {this.noAnonFunctionType = false} + + // Token store. + __init3() {this.tokens = []} + + // Array of all observed scopes, ordered by their ending position. 
+ __init4() {this.scopes = []} + + // The current position of the tokenizer in the input. + __init5() {this.pos = 0} + + // Information about the current token. + __init6() {this.type = _types.TokenType.eof} + __init7() {this.contextualKeyword = _keywords.ContextualKeyword.NONE} + __init8() {this.start = 0} + __init9() {this.end = 0} + + __init10() {this.isType = false} + __init11() {this.scopeDepth = 0} + + /** + * If the parser is in an error state, then the token is always tt.eof and all functions can + * keep executing but should be written so they don't get into an infinite loop in this situation. + * + * This approach, combined with the ability to snapshot and restore state, allows us to implement + * backtracking without exceptions and without needing to explicitly propagate error states + * everywhere. + */ + __init12() {this.error = null} + + snapshot() { + return new StateSnapshot( + this.potentialArrowAt, + this.noAnonFunctionType, + this.tokens.length, + this.scopes.length, + this.pos, + this.type, + this.contextualKeyword, + this.start, + this.end, + this.isType, + this.scopeDepth, + this.error, + ); + } + + restoreFromSnapshot(snapshot) { + this.potentialArrowAt = snapshot.potentialArrowAt; + this.noAnonFunctionType = snapshot.noAnonFunctionType; + this.tokens.length = snapshot.tokensLength; + this.scopes.length = snapshot.scopesLength; + this.pos = snapshot.pos; + this.type = snapshot.type; + this.contextualKeyword = snapshot.contextualKeyword; + this.start = snapshot.start; + this.end = snapshot.end; + this.isType = snapshot.isType; + this.scopeDepth = snapshot.scopeDepth; + this.error = snapshot.error; + } +} exports.default = State; diff --git a/node_modules/sucrase/dist/parser/tokenizer/state.mjs b/node_modules/sucrase/dist/parser/tokenizer/state.mjs new file mode 100644 index 00000000..69ea8e31 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/state.mjs @@ -0,0 +1,100 @@ + +import {ContextualKeyword} from "./keywords"; +import { TokenType as tt} from "./types"; + +export class Scope { + + + + + constructor(startTokenIndex, endTokenIndex, isFunctionScope) { + this.startTokenIndex = startTokenIndex; + this.endTokenIndex = endTokenIndex; + this.isFunctionScope = isFunctionScope; + } +} + +export class StateSnapshot { + constructor( + potentialArrowAt, + noAnonFunctionType, + tokensLength, + scopesLength, + pos, + type, + contextualKeyword, + start, + end, + isType, + scopeDepth, + error, + ) {;this.potentialArrowAt = potentialArrowAt;this.noAnonFunctionType = noAnonFunctionType;this.tokensLength = tokensLength;this.scopesLength = scopesLength;this.pos = pos;this.type = type;this.contextualKeyword = contextualKeyword;this.start = start;this.end = end;this.isType = isType;this.scopeDepth = scopeDepth;this.error = error;} +} + +export default class State {constructor() { State.prototype.__init.call(this);State.prototype.__init2.call(this);State.prototype.__init3.call(this);State.prototype.__init4.call(this);State.prototype.__init5.call(this);State.prototype.__init6.call(this);State.prototype.__init7.call(this);State.prototype.__init8.call(this);State.prototype.__init9.call(this);State.prototype.__init10.call(this);State.prototype.__init11.call(this);State.prototype.__init12.call(this); } + // Used to signify the start of a potential arrow function + __init() {this.potentialArrowAt = -1} + + // Used by Flow to handle an edge case involving function type parsing. + __init2() {this.noAnonFunctionType = false} + + // Token store. 
+ __init3() {this.tokens = []} + + // Array of all observed scopes, ordered by their ending position. + __init4() {this.scopes = []} + + // The current position of the tokenizer in the input. + __init5() {this.pos = 0} + + // Information about the current token. + __init6() {this.type = tt.eof} + __init7() {this.contextualKeyword = ContextualKeyword.NONE} + __init8() {this.start = 0} + __init9() {this.end = 0} + + __init10() {this.isType = false} + __init11() {this.scopeDepth = 0} + + /** + * If the parser is in an error state, then the token is always tt.eof and all functions can + * keep executing but should be written so they don't get into an infinite loop in this situation. + * + * This approach, combined with the ability to snapshot and restore state, allows us to implement + * backtracking without exceptions and without needing to explicitly propagate error states + * everywhere. + */ + __init12() {this.error = null} + + snapshot() { + return new StateSnapshot( + this.potentialArrowAt, + this.noAnonFunctionType, + this.tokens.length, + this.scopes.length, + this.pos, + this.type, + this.contextualKeyword, + this.start, + this.end, + this.isType, + this.scopeDepth, + this.error, + ); + } + + restoreFromSnapshot(snapshot) { + this.potentialArrowAt = snapshot.potentialArrowAt; + this.noAnonFunctionType = snapshot.noAnonFunctionType; + this.tokens.length = snapshot.tokensLength; + this.scopes.length = snapshot.scopesLength; + this.pos = snapshot.pos; + this.type = snapshot.type; + this.contextualKeyword = snapshot.contextualKeyword; + this.start = snapshot.start; + this.end = snapshot.end; + this.isType = snapshot.isType; + this.scopeDepth = snapshot.scopeDepth; + this.error = snapshot.error; + } +} diff --git a/node_modules/sucrase/dist/parser/tokenizer/types.d.ts b/node_modules/sucrase/dist/parser/tokenizer/types.d.ts new file mode 100644 index 00000000..797a0bc7 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/types.d.ts @@ -0,0 +1,121 @@ +/** + * Enum of all token types, with bit fields to signify meaningful properties. 
+ */ +export declare enum TokenType { + PRECEDENCE_MASK = 15, + IS_KEYWORD = 16, + IS_ASSIGN = 32, + IS_RIGHT_ASSOCIATIVE = 64, + IS_PREFIX = 128, + IS_POSTFIX = 256, + num = 0, + bigint = 512, + regexp = 1024, + string = 1536, + name = 2048, + eof = 2560, + bracketL = 3072, + bracketR = 3584, + braceL = 4096, + braceBarL = 4608, + braceR = 5120, + braceBarR = 5632, + parenL = 6144, + parenR = 6656, + comma = 7168, + semi = 7680, + colon = 8192, + doubleColon = 8704, + dot = 9216, + question = 9728, + questionDot = 10240, + arrow = 10752, + template = 11264, + ellipsis = 11776, + backQuote = 12288, + dollarBraceL = 12800, + at = 13312, + hash = 13824, + eq = 14368, + assign = 14880, + preIncDec = 15744, + postIncDec = 16256, + bang = 16512, + tilde = 17024, + pipeline = 17409, + nullishCoalescing = 17922, + logicalOR = 18434, + logicalAND = 18947, + bitwiseOR = 19460, + bitwiseXOR = 19973, + bitwiseAND = 20486, + equality = 20999, + lessThan = 21512, + greaterThan = 22024, + relationalOrEqual = 22536, + bitShift = 23049, + plus = 23690, + minus = 24202, + modulo = 24587, + star = 25099, + slash = 25611, + exponent = 26188, + jsxName = 26624, + jsxText = 27136, + jsxTagStart = 27648, + jsxTagEnd = 28160, + typeParameterStart = 28672, + nonNullAssertion = 29184, + _break = 29712, + _case = 30224, + _catch = 30736, + _continue = 31248, + _debugger = 31760, + _default = 32272, + _do = 32784, + _else = 33296, + _finally = 33808, + _for = 34320, + _function = 34832, + _if = 35344, + _return = 35856, + _switch = 36368, + _throw = 37008, + _try = 37392, + _var = 37904, + _let = 38416, + _const = 38928, + _while = 39440, + _with = 39952, + _new = 40464, + _this = 40976, + _super = 41488, + _class = 42000, + _extends = 42512, + _export = 43024, + _import = 43536, + _yield = 44048, + _null = 44560, + _true = 45072, + _false = 45584, + _in = 46104, + _instanceof = 46616, + _typeof = 47248, + _void = 47760, + _delete = 48272, + _async = 48656, + _get = 49168, + _set = 49680, + _declare = 50192, + _readonly = 50704, + _abstract = 51216, + _static = 51728, + _public = 52240, + _private = 52752, + _protected = 53264, + _as = 53776, + _enum = 54288, + _type = 54800, + _implements = 55312 +} +export declare function formatTokenType(tokenType: TokenType): string; diff --git a/node_modules/sucrase/dist/parser/tokenizer/types.js b/node_modules/sucrase/dist/parser/tokenizer/types.js new file mode 100644 index 00000000..08cb0f59 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/types.js @@ -0,0 +1,347 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});// Generated file, do not edit! Run "yarn generate" to re-generate this file. +/** + * Enum of all token types, with bit fields to signify meaningful properties. + */ +var TokenType; (function (TokenType) { + // Precedence 0 means not an operator; otherwise it is a positive number up to 12. 
+ const PRECEDENCE_MASK = 0xf; TokenType[TokenType["PRECEDENCE_MASK"] = PRECEDENCE_MASK] = "PRECEDENCE_MASK"; + const IS_KEYWORD = 1 << 4; TokenType[TokenType["IS_KEYWORD"] = IS_KEYWORD] = "IS_KEYWORD"; + const IS_ASSIGN = 1 << 5; TokenType[TokenType["IS_ASSIGN"] = IS_ASSIGN] = "IS_ASSIGN"; + const IS_RIGHT_ASSOCIATIVE = 1 << 6; TokenType[TokenType["IS_RIGHT_ASSOCIATIVE"] = IS_RIGHT_ASSOCIATIVE] = "IS_RIGHT_ASSOCIATIVE"; + const IS_PREFIX = 1 << 7; TokenType[TokenType["IS_PREFIX"] = IS_PREFIX] = "IS_PREFIX"; + const IS_POSTFIX = 1 << 8; TokenType[TokenType["IS_POSTFIX"] = IS_POSTFIX] = "IS_POSTFIX"; + + const num = 0; TokenType[TokenType["num"] = num] = "num"; // num + const bigint = 512; TokenType[TokenType["bigint"] = bigint] = "bigint"; // bigint + const regexp = 1024; TokenType[TokenType["regexp"] = regexp] = "regexp"; // regexp + const string = 1536; TokenType[TokenType["string"] = string] = "string"; // string + const name = 2048; TokenType[TokenType["name"] = name] = "name"; // name + const eof = 2560; TokenType[TokenType["eof"] = eof] = "eof"; // eof + const bracketL = 3072; TokenType[TokenType["bracketL"] = bracketL] = "bracketL"; // [ + const bracketR = 3584; TokenType[TokenType["bracketR"] = bracketR] = "bracketR"; // ] + const braceL = 4096; TokenType[TokenType["braceL"] = braceL] = "braceL"; // { + const braceBarL = 4608; TokenType[TokenType["braceBarL"] = braceBarL] = "braceBarL"; // {| + const braceR = 5120; TokenType[TokenType["braceR"] = braceR] = "braceR"; // } + const braceBarR = 5632; TokenType[TokenType["braceBarR"] = braceBarR] = "braceBarR"; // |} + const parenL = 6144; TokenType[TokenType["parenL"] = parenL] = "parenL"; // ( + const parenR = 6656; TokenType[TokenType["parenR"] = parenR] = "parenR"; // ) + const comma = 7168; TokenType[TokenType["comma"] = comma] = "comma"; // , + const semi = 7680; TokenType[TokenType["semi"] = semi] = "semi"; // ; + const colon = 8192; TokenType[TokenType["colon"] = colon] = "colon"; // : + const doubleColon = 8704; TokenType[TokenType["doubleColon"] = doubleColon] = "doubleColon"; // :: + const dot = 9216; TokenType[TokenType["dot"] = dot] = "dot"; // . + const question = 9728; TokenType[TokenType["question"] = question] = "question"; // ? + const questionDot = 10240; TokenType[TokenType["questionDot"] = questionDot] = "questionDot"; // ?. + const arrow = 10752; TokenType[TokenType["arrow"] = arrow] = "arrow"; // => + const template = 11264; TokenType[TokenType["template"] = template] = "template"; // template + const ellipsis = 11776; TokenType[TokenType["ellipsis"] = ellipsis] = "ellipsis"; // ... + const backQuote = 12288; TokenType[TokenType["backQuote"] = backQuote] = "backQuote"; // ` + const dollarBraceL = 12800; TokenType[TokenType["dollarBraceL"] = dollarBraceL] = "dollarBraceL"; // ${ + const at = 13312; TokenType[TokenType["at"] = at] = "at"; // @ + const hash = 13824; TokenType[TokenType["hash"] = hash] = "hash"; // # + const eq = 14368; TokenType[TokenType["eq"] = eq] = "eq"; // = isAssign + const assign = 14880; TokenType[TokenType["assign"] = assign] = "assign"; // _= isAssign + const preIncDec = 15744; TokenType[TokenType["preIncDec"] = preIncDec] = "preIncDec"; // ++/-- prefix postfix + const postIncDec = 16256; TokenType[TokenType["postIncDec"] = postIncDec] = "postIncDec"; // ++/-- prefix postfix + const bang = 16512; TokenType[TokenType["bang"] = bang] = "bang"; // ! 
prefix + const tilde = 17024; TokenType[TokenType["tilde"] = tilde] = "tilde"; // ~ prefix + const pipeline = 17409; TokenType[TokenType["pipeline"] = pipeline] = "pipeline"; // |> prec:1 + const nullishCoalescing = 17922; TokenType[TokenType["nullishCoalescing"] = nullishCoalescing] = "nullishCoalescing"; // ?? prec:2 + const logicalOR = 18434; TokenType[TokenType["logicalOR"] = logicalOR] = "logicalOR"; // || prec:2 + const logicalAND = 18947; TokenType[TokenType["logicalAND"] = logicalAND] = "logicalAND"; // && prec:3 + const bitwiseOR = 19460; TokenType[TokenType["bitwiseOR"] = bitwiseOR] = "bitwiseOR"; // | prec:4 + const bitwiseXOR = 19973; TokenType[TokenType["bitwiseXOR"] = bitwiseXOR] = "bitwiseXOR"; // ^ prec:5 + const bitwiseAND = 20486; TokenType[TokenType["bitwiseAND"] = bitwiseAND] = "bitwiseAND"; // & prec:6 + const equality = 20999; TokenType[TokenType["equality"] = equality] = "equality"; // ==/!= prec:7 + const lessThan = 21512; TokenType[TokenType["lessThan"] = lessThan] = "lessThan"; // < prec:8 + const greaterThan = 22024; TokenType[TokenType["greaterThan"] = greaterThan] = "greaterThan"; // > prec:8 + const relationalOrEqual = 22536; TokenType[TokenType["relationalOrEqual"] = relationalOrEqual] = "relationalOrEqual"; // <=/>= prec:8 + const bitShift = 23049; TokenType[TokenType["bitShift"] = bitShift] = "bitShift"; // <> prec:9 + const plus = 23690; TokenType[TokenType["plus"] = plus] = "plus"; // + prec:10 prefix + const minus = 24202; TokenType[TokenType["minus"] = minus] = "minus"; // - prec:10 prefix + const modulo = 24587; TokenType[TokenType["modulo"] = modulo] = "modulo"; // % prec:11 + const star = 25099; TokenType[TokenType["star"] = star] = "star"; // * prec:11 + const slash = 25611; TokenType[TokenType["slash"] = slash] = "slash"; // / prec:11 + const exponent = 26188; TokenType[TokenType["exponent"] = exponent] = "exponent"; // ** prec:12 rightAssociative + const jsxName = 26624; TokenType[TokenType["jsxName"] = jsxName] = "jsxName"; // jsxName + const jsxText = 27136; TokenType[TokenType["jsxText"] = jsxText] = "jsxText"; // jsxText + const jsxTagStart = 27648; TokenType[TokenType["jsxTagStart"] = jsxTagStart] = "jsxTagStart"; // jsxTagStart + const jsxTagEnd = 28160; TokenType[TokenType["jsxTagEnd"] = jsxTagEnd] = "jsxTagEnd"; // jsxTagEnd + const typeParameterStart = 28672; TokenType[TokenType["typeParameterStart"] = typeParameterStart] = "typeParameterStart"; // typeParameterStart + const nonNullAssertion = 29184; TokenType[TokenType["nonNullAssertion"] = nonNullAssertion] = "nonNullAssertion"; // nonNullAssertion + const _break = 29712; TokenType[TokenType["_break"] = _break] = "_break"; // break keyword + const _case = 30224; TokenType[TokenType["_case"] = _case] = "_case"; // case keyword + const _catch = 30736; TokenType[TokenType["_catch"] = _catch] = "_catch"; // catch keyword + const _continue = 31248; TokenType[TokenType["_continue"] = _continue] = "_continue"; // continue keyword + const _debugger = 31760; TokenType[TokenType["_debugger"] = _debugger] = "_debugger"; // debugger keyword + const _default = 32272; TokenType[TokenType["_default"] = _default] = "_default"; // default keyword + const _do = 32784; TokenType[TokenType["_do"] = _do] = "_do"; // do keyword + const _else = 33296; TokenType[TokenType["_else"] = _else] = "_else"; // else keyword + const _finally = 33808; TokenType[TokenType["_finally"] = _finally] = "_finally"; // finally keyword + const _for = 34320; TokenType[TokenType["_for"] = _for] = "_for"; // for keyword + const 
_function = 34832; TokenType[TokenType["_function"] = _function] = "_function"; // function keyword + const _if = 35344; TokenType[TokenType["_if"] = _if] = "_if"; // if keyword + const _return = 35856; TokenType[TokenType["_return"] = _return] = "_return"; // return keyword + const _switch = 36368; TokenType[TokenType["_switch"] = _switch] = "_switch"; // switch keyword + const _throw = 37008; TokenType[TokenType["_throw"] = _throw] = "_throw"; // throw keyword prefix + const _try = 37392; TokenType[TokenType["_try"] = _try] = "_try"; // try keyword + const _var = 37904; TokenType[TokenType["_var"] = _var] = "_var"; // var keyword + const _let = 38416; TokenType[TokenType["_let"] = _let] = "_let"; // let keyword + const _const = 38928; TokenType[TokenType["_const"] = _const] = "_const"; // const keyword + const _while = 39440; TokenType[TokenType["_while"] = _while] = "_while"; // while keyword + const _with = 39952; TokenType[TokenType["_with"] = _with] = "_with"; // with keyword + const _new = 40464; TokenType[TokenType["_new"] = _new] = "_new"; // new keyword + const _this = 40976; TokenType[TokenType["_this"] = _this] = "_this"; // this keyword + const _super = 41488; TokenType[TokenType["_super"] = _super] = "_super"; // super keyword + const _class = 42000; TokenType[TokenType["_class"] = _class] = "_class"; // class keyword + const _extends = 42512; TokenType[TokenType["_extends"] = _extends] = "_extends"; // extends keyword + const _export = 43024; TokenType[TokenType["_export"] = _export] = "_export"; // export keyword + const _import = 43536; TokenType[TokenType["_import"] = _import] = "_import"; // import keyword + const _yield = 44048; TokenType[TokenType["_yield"] = _yield] = "_yield"; // yield keyword + const _null = 44560; TokenType[TokenType["_null"] = _null] = "_null"; // null keyword + const _true = 45072; TokenType[TokenType["_true"] = _true] = "_true"; // true keyword + const _false = 45584; TokenType[TokenType["_false"] = _false] = "_false"; // false keyword + const _in = 46104; TokenType[TokenType["_in"] = _in] = "_in"; // in prec:8 keyword + const _instanceof = 46616; TokenType[TokenType["_instanceof"] = _instanceof] = "_instanceof"; // instanceof prec:8 keyword + const _typeof = 47248; TokenType[TokenType["_typeof"] = _typeof] = "_typeof"; // typeof keyword prefix + const _void = 47760; TokenType[TokenType["_void"] = _void] = "_void"; // void keyword prefix + const _delete = 48272; TokenType[TokenType["_delete"] = _delete] = "_delete"; // delete keyword prefix + const _async = 48656; TokenType[TokenType["_async"] = _async] = "_async"; // async keyword + const _get = 49168; TokenType[TokenType["_get"] = _get] = "_get"; // get keyword + const _set = 49680; TokenType[TokenType["_set"] = _set] = "_set"; // set keyword + const _declare = 50192; TokenType[TokenType["_declare"] = _declare] = "_declare"; // declare keyword + const _readonly = 50704; TokenType[TokenType["_readonly"] = _readonly] = "_readonly"; // readonly keyword + const _abstract = 51216; TokenType[TokenType["_abstract"] = _abstract] = "_abstract"; // abstract keyword + const _static = 51728; TokenType[TokenType["_static"] = _static] = "_static"; // static keyword + const _public = 52240; TokenType[TokenType["_public"] = _public] = "_public"; // public keyword + const _private = 52752; TokenType[TokenType["_private"] = _private] = "_private"; // private keyword + const _protected = 53264; TokenType[TokenType["_protected"] = _protected] = "_protected"; // protected keyword + const _as = 53776; 
TokenType[TokenType["_as"] = _as] = "_as"; // as keyword + const _enum = 54288; TokenType[TokenType["_enum"] = _enum] = "_enum"; // enum keyword + const _type = 54800; TokenType[TokenType["_type"] = _type] = "_type"; // type keyword + const _implements = 55312; TokenType[TokenType["_implements"] = _implements] = "_implements"; // implements keyword +})(TokenType || (exports.TokenType = TokenType = {})); + function formatTokenType(tokenType) { + switch (tokenType) { + case TokenType.num: + return "num"; + case TokenType.bigint: + return "bigint"; + case TokenType.regexp: + return "regexp"; + case TokenType.string: + return "string"; + case TokenType.name: + return "name"; + case TokenType.eof: + return "eof"; + case TokenType.bracketL: + return "["; + case TokenType.bracketR: + return "]"; + case TokenType.braceL: + return "{"; + case TokenType.braceBarL: + return "{|"; + case TokenType.braceR: + return "}"; + case TokenType.braceBarR: + return "|}"; + case TokenType.parenL: + return "("; + case TokenType.parenR: + return ")"; + case TokenType.comma: + return ","; + case TokenType.semi: + return ";"; + case TokenType.colon: + return ":"; + case TokenType.doubleColon: + return "::"; + case TokenType.dot: + return "."; + case TokenType.question: + return "?"; + case TokenType.questionDot: + return "?."; + case TokenType.arrow: + return "=>"; + case TokenType.template: + return "template"; + case TokenType.ellipsis: + return "..."; + case TokenType.backQuote: + return "`"; + case TokenType.dollarBraceL: + return "${"; + case TokenType.at: + return "@"; + case TokenType.hash: + return "#"; + case TokenType.eq: + return "="; + case TokenType.assign: + return "_="; + case TokenType.preIncDec: + return "++/--"; + case TokenType.postIncDec: + return "++/--"; + case TokenType.bang: + return "!"; + case TokenType.tilde: + return "~"; + case TokenType.pipeline: + return "|>"; + case TokenType.nullishCoalescing: + return "??"; + case TokenType.logicalOR: + return "||"; + case TokenType.logicalAND: + return "&&"; + case TokenType.bitwiseOR: + return "|"; + case TokenType.bitwiseXOR: + return "^"; + case TokenType.bitwiseAND: + return "&"; + case TokenType.equality: + return "==/!="; + case TokenType.lessThan: + return "<"; + case TokenType.greaterThan: + return ">"; + case TokenType.relationalOrEqual: + return "<=/>="; + case TokenType.bitShift: + return "<>"; + case TokenType.plus: + return "+"; + case TokenType.minus: + return "-"; + case TokenType.modulo: + return "%"; + case TokenType.star: + return "*"; + case TokenType.slash: + return "/"; + case TokenType.exponent: + return "**"; + case TokenType.jsxName: + return "jsxName"; + case TokenType.jsxText: + return "jsxText"; + case TokenType.jsxTagStart: + return "jsxTagStart"; + case TokenType.jsxTagEnd: + return "jsxTagEnd"; + case TokenType.typeParameterStart: + return "typeParameterStart"; + case TokenType.nonNullAssertion: + return "nonNullAssertion"; + case TokenType._break: + return "break"; + case TokenType._case: + return "case"; + case TokenType._catch: + return "catch"; + case TokenType._continue: + return "continue"; + case TokenType._debugger: + return "debugger"; + case TokenType._default: + return "default"; + case TokenType._do: + return "do"; + case TokenType._else: + return "else"; + case TokenType._finally: + return "finally"; + case TokenType._for: + return "for"; + case TokenType._function: + return "function"; + case TokenType._if: + return "if"; + case TokenType._return: + return "return"; + case TokenType._switch: + return 
"switch"; + case TokenType._throw: + return "throw"; + case TokenType._try: + return "try"; + case TokenType._var: + return "var"; + case TokenType._let: + return "let"; + case TokenType._const: + return "const"; + case TokenType._while: + return "while"; + case TokenType._with: + return "with"; + case TokenType._new: + return "new"; + case TokenType._this: + return "this"; + case TokenType._super: + return "super"; + case TokenType._class: + return "class"; + case TokenType._extends: + return "extends"; + case TokenType._export: + return "export"; + case TokenType._import: + return "import"; + case TokenType._yield: + return "yield"; + case TokenType._null: + return "null"; + case TokenType._true: + return "true"; + case TokenType._false: + return "false"; + case TokenType._in: + return "in"; + case TokenType._instanceof: + return "instanceof"; + case TokenType._typeof: + return "typeof"; + case TokenType._void: + return "void"; + case TokenType._delete: + return "delete"; + case TokenType._async: + return "async"; + case TokenType._get: + return "get"; + case TokenType._set: + return "set"; + case TokenType._declare: + return "declare"; + case TokenType._readonly: + return "readonly"; + case TokenType._abstract: + return "abstract"; + case TokenType._static: + return "static"; + case TokenType._public: + return "public"; + case TokenType._private: + return "private"; + case TokenType._protected: + return "protected"; + case TokenType._as: + return "as"; + case TokenType._enum: + return "enum"; + case TokenType._type: + return "type"; + case TokenType._implements: + return "implements"; + default: + return ""; + } +} exports.formatTokenType = formatTokenType; diff --git a/node_modules/sucrase/dist/parser/tokenizer/types.mjs b/node_modules/sucrase/dist/parser/tokenizer/types.mjs new file mode 100644 index 00000000..726ea659 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/types.mjs @@ -0,0 +1,347 @@ +// Generated file, do not edit! Run "yarn generate" to re-generate this file. +/** + * Enum of all token types, with bit fields to signify meaningful properties. + */ +export var TokenType; (function (TokenType) { + // Precedence 0 means not an operator; otherwise it is a positive number up to 12. 
+ const PRECEDENCE_MASK = 0xf; TokenType[TokenType["PRECEDENCE_MASK"] = PRECEDENCE_MASK] = "PRECEDENCE_MASK"; + const IS_KEYWORD = 1 << 4; TokenType[TokenType["IS_KEYWORD"] = IS_KEYWORD] = "IS_KEYWORD"; + const IS_ASSIGN = 1 << 5; TokenType[TokenType["IS_ASSIGN"] = IS_ASSIGN] = "IS_ASSIGN"; + const IS_RIGHT_ASSOCIATIVE = 1 << 6; TokenType[TokenType["IS_RIGHT_ASSOCIATIVE"] = IS_RIGHT_ASSOCIATIVE] = "IS_RIGHT_ASSOCIATIVE"; + const IS_PREFIX = 1 << 7; TokenType[TokenType["IS_PREFIX"] = IS_PREFIX] = "IS_PREFIX"; + const IS_POSTFIX = 1 << 8; TokenType[TokenType["IS_POSTFIX"] = IS_POSTFIX] = "IS_POSTFIX"; + + const num = 0; TokenType[TokenType["num"] = num] = "num"; // num + const bigint = 512; TokenType[TokenType["bigint"] = bigint] = "bigint"; // bigint + const regexp = 1024; TokenType[TokenType["regexp"] = regexp] = "regexp"; // regexp + const string = 1536; TokenType[TokenType["string"] = string] = "string"; // string + const name = 2048; TokenType[TokenType["name"] = name] = "name"; // name + const eof = 2560; TokenType[TokenType["eof"] = eof] = "eof"; // eof + const bracketL = 3072; TokenType[TokenType["bracketL"] = bracketL] = "bracketL"; // [ + const bracketR = 3584; TokenType[TokenType["bracketR"] = bracketR] = "bracketR"; // ] + const braceL = 4096; TokenType[TokenType["braceL"] = braceL] = "braceL"; // { + const braceBarL = 4608; TokenType[TokenType["braceBarL"] = braceBarL] = "braceBarL"; // {| + const braceR = 5120; TokenType[TokenType["braceR"] = braceR] = "braceR"; // } + const braceBarR = 5632; TokenType[TokenType["braceBarR"] = braceBarR] = "braceBarR"; // |} + const parenL = 6144; TokenType[TokenType["parenL"] = parenL] = "parenL"; // ( + const parenR = 6656; TokenType[TokenType["parenR"] = parenR] = "parenR"; // ) + const comma = 7168; TokenType[TokenType["comma"] = comma] = "comma"; // , + const semi = 7680; TokenType[TokenType["semi"] = semi] = "semi"; // ; + const colon = 8192; TokenType[TokenType["colon"] = colon] = "colon"; // : + const doubleColon = 8704; TokenType[TokenType["doubleColon"] = doubleColon] = "doubleColon"; // :: + const dot = 9216; TokenType[TokenType["dot"] = dot] = "dot"; // . + const question = 9728; TokenType[TokenType["question"] = question] = "question"; // ? + const questionDot = 10240; TokenType[TokenType["questionDot"] = questionDot] = "questionDot"; // ?. + const arrow = 10752; TokenType[TokenType["arrow"] = arrow] = "arrow"; // => + const template = 11264; TokenType[TokenType["template"] = template] = "template"; // template + const ellipsis = 11776; TokenType[TokenType["ellipsis"] = ellipsis] = "ellipsis"; // ... + const backQuote = 12288; TokenType[TokenType["backQuote"] = backQuote] = "backQuote"; // ` + const dollarBraceL = 12800; TokenType[TokenType["dollarBraceL"] = dollarBraceL] = "dollarBraceL"; // ${ + const at = 13312; TokenType[TokenType["at"] = at] = "at"; // @ + const hash = 13824; TokenType[TokenType["hash"] = hash] = "hash"; // # + const eq = 14368; TokenType[TokenType["eq"] = eq] = "eq"; // = isAssign + const assign = 14880; TokenType[TokenType["assign"] = assign] = "assign"; // _= isAssign + const preIncDec = 15744; TokenType[TokenType["preIncDec"] = preIncDec] = "preIncDec"; // ++/-- prefix postfix + const postIncDec = 16256; TokenType[TokenType["postIncDec"] = postIncDec] = "postIncDec"; // ++/-- prefix postfix + const bang = 16512; TokenType[TokenType["bang"] = bang] = "bang"; // ! 
prefix + const tilde = 17024; TokenType[TokenType["tilde"] = tilde] = "tilde"; // ~ prefix + const pipeline = 17409; TokenType[TokenType["pipeline"] = pipeline] = "pipeline"; // |> prec:1 + const nullishCoalescing = 17922; TokenType[TokenType["nullishCoalescing"] = nullishCoalescing] = "nullishCoalescing"; // ?? prec:2 + const logicalOR = 18434; TokenType[TokenType["logicalOR"] = logicalOR] = "logicalOR"; // || prec:2 + const logicalAND = 18947; TokenType[TokenType["logicalAND"] = logicalAND] = "logicalAND"; // && prec:3 + const bitwiseOR = 19460; TokenType[TokenType["bitwiseOR"] = bitwiseOR] = "bitwiseOR"; // | prec:4 + const bitwiseXOR = 19973; TokenType[TokenType["bitwiseXOR"] = bitwiseXOR] = "bitwiseXOR"; // ^ prec:5 + const bitwiseAND = 20486; TokenType[TokenType["bitwiseAND"] = bitwiseAND] = "bitwiseAND"; // & prec:6 + const equality = 20999; TokenType[TokenType["equality"] = equality] = "equality"; // ==/!= prec:7 + const lessThan = 21512; TokenType[TokenType["lessThan"] = lessThan] = "lessThan"; // < prec:8 + const greaterThan = 22024; TokenType[TokenType["greaterThan"] = greaterThan] = "greaterThan"; // > prec:8 + const relationalOrEqual = 22536; TokenType[TokenType["relationalOrEqual"] = relationalOrEqual] = "relationalOrEqual"; // <=/>= prec:8 + const bitShift = 23049; TokenType[TokenType["bitShift"] = bitShift] = "bitShift"; // <> prec:9 + const plus = 23690; TokenType[TokenType["plus"] = plus] = "plus"; // + prec:10 prefix + const minus = 24202; TokenType[TokenType["minus"] = minus] = "minus"; // - prec:10 prefix + const modulo = 24587; TokenType[TokenType["modulo"] = modulo] = "modulo"; // % prec:11 + const star = 25099; TokenType[TokenType["star"] = star] = "star"; // * prec:11 + const slash = 25611; TokenType[TokenType["slash"] = slash] = "slash"; // / prec:11 + const exponent = 26188; TokenType[TokenType["exponent"] = exponent] = "exponent"; // ** prec:12 rightAssociative + const jsxName = 26624; TokenType[TokenType["jsxName"] = jsxName] = "jsxName"; // jsxName + const jsxText = 27136; TokenType[TokenType["jsxText"] = jsxText] = "jsxText"; // jsxText + const jsxTagStart = 27648; TokenType[TokenType["jsxTagStart"] = jsxTagStart] = "jsxTagStart"; // jsxTagStart + const jsxTagEnd = 28160; TokenType[TokenType["jsxTagEnd"] = jsxTagEnd] = "jsxTagEnd"; // jsxTagEnd + const typeParameterStart = 28672; TokenType[TokenType["typeParameterStart"] = typeParameterStart] = "typeParameterStart"; // typeParameterStart + const nonNullAssertion = 29184; TokenType[TokenType["nonNullAssertion"] = nonNullAssertion] = "nonNullAssertion"; // nonNullAssertion + const _break = 29712; TokenType[TokenType["_break"] = _break] = "_break"; // break keyword + const _case = 30224; TokenType[TokenType["_case"] = _case] = "_case"; // case keyword + const _catch = 30736; TokenType[TokenType["_catch"] = _catch] = "_catch"; // catch keyword + const _continue = 31248; TokenType[TokenType["_continue"] = _continue] = "_continue"; // continue keyword + const _debugger = 31760; TokenType[TokenType["_debugger"] = _debugger] = "_debugger"; // debugger keyword + const _default = 32272; TokenType[TokenType["_default"] = _default] = "_default"; // default keyword + const _do = 32784; TokenType[TokenType["_do"] = _do] = "_do"; // do keyword + const _else = 33296; TokenType[TokenType["_else"] = _else] = "_else"; // else keyword + const _finally = 33808; TokenType[TokenType["_finally"] = _finally] = "_finally"; // finally keyword + const _for = 34320; TokenType[TokenType["_for"] = _for] = "_for"; // for keyword + const 
_function = 34832; TokenType[TokenType["_function"] = _function] = "_function"; // function keyword + const _if = 35344; TokenType[TokenType["_if"] = _if] = "_if"; // if keyword + const _return = 35856; TokenType[TokenType["_return"] = _return] = "_return"; // return keyword + const _switch = 36368; TokenType[TokenType["_switch"] = _switch] = "_switch"; // switch keyword + const _throw = 37008; TokenType[TokenType["_throw"] = _throw] = "_throw"; // throw keyword prefix + const _try = 37392; TokenType[TokenType["_try"] = _try] = "_try"; // try keyword + const _var = 37904; TokenType[TokenType["_var"] = _var] = "_var"; // var keyword + const _let = 38416; TokenType[TokenType["_let"] = _let] = "_let"; // let keyword + const _const = 38928; TokenType[TokenType["_const"] = _const] = "_const"; // const keyword + const _while = 39440; TokenType[TokenType["_while"] = _while] = "_while"; // while keyword + const _with = 39952; TokenType[TokenType["_with"] = _with] = "_with"; // with keyword + const _new = 40464; TokenType[TokenType["_new"] = _new] = "_new"; // new keyword + const _this = 40976; TokenType[TokenType["_this"] = _this] = "_this"; // this keyword + const _super = 41488; TokenType[TokenType["_super"] = _super] = "_super"; // super keyword + const _class = 42000; TokenType[TokenType["_class"] = _class] = "_class"; // class keyword + const _extends = 42512; TokenType[TokenType["_extends"] = _extends] = "_extends"; // extends keyword + const _export = 43024; TokenType[TokenType["_export"] = _export] = "_export"; // export keyword + const _import = 43536; TokenType[TokenType["_import"] = _import] = "_import"; // import keyword + const _yield = 44048; TokenType[TokenType["_yield"] = _yield] = "_yield"; // yield keyword + const _null = 44560; TokenType[TokenType["_null"] = _null] = "_null"; // null keyword + const _true = 45072; TokenType[TokenType["_true"] = _true] = "_true"; // true keyword + const _false = 45584; TokenType[TokenType["_false"] = _false] = "_false"; // false keyword + const _in = 46104; TokenType[TokenType["_in"] = _in] = "_in"; // in prec:8 keyword + const _instanceof = 46616; TokenType[TokenType["_instanceof"] = _instanceof] = "_instanceof"; // instanceof prec:8 keyword + const _typeof = 47248; TokenType[TokenType["_typeof"] = _typeof] = "_typeof"; // typeof keyword prefix + const _void = 47760; TokenType[TokenType["_void"] = _void] = "_void"; // void keyword prefix + const _delete = 48272; TokenType[TokenType["_delete"] = _delete] = "_delete"; // delete keyword prefix + const _async = 48656; TokenType[TokenType["_async"] = _async] = "_async"; // async keyword + const _get = 49168; TokenType[TokenType["_get"] = _get] = "_get"; // get keyword + const _set = 49680; TokenType[TokenType["_set"] = _set] = "_set"; // set keyword + const _declare = 50192; TokenType[TokenType["_declare"] = _declare] = "_declare"; // declare keyword + const _readonly = 50704; TokenType[TokenType["_readonly"] = _readonly] = "_readonly"; // readonly keyword + const _abstract = 51216; TokenType[TokenType["_abstract"] = _abstract] = "_abstract"; // abstract keyword + const _static = 51728; TokenType[TokenType["_static"] = _static] = "_static"; // static keyword + const _public = 52240; TokenType[TokenType["_public"] = _public] = "_public"; // public keyword + const _private = 52752; TokenType[TokenType["_private"] = _private] = "_private"; // private keyword + const _protected = 53264; TokenType[TokenType["_protected"] = _protected] = "_protected"; // protected keyword + const _as = 53776; 
TokenType[TokenType["_as"] = _as] = "_as"; // as keyword + const _enum = 54288; TokenType[TokenType["_enum"] = _enum] = "_enum"; // enum keyword + const _type = 54800; TokenType[TokenType["_type"] = _type] = "_type"; // type keyword + const _implements = 55312; TokenType[TokenType["_implements"] = _implements] = "_implements"; // implements keyword +})(TokenType || (TokenType = {})); +export function formatTokenType(tokenType) { + switch (tokenType) { + case TokenType.num: + return "num"; + case TokenType.bigint: + return "bigint"; + case TokenType.regexp: + return "regexp"; + case TokenType.string: + return "string"; + case TokenType.name: + return "name"; + case TokenType.eof: + return "eof"; + case TokenType.bracketL: + return "["; + case TokenType.bracketR: + return "]"; + case TokenType.braceL: + return "{"; + case TokenType.braceBarL: + return "{|"; + case TokenType.braceR: + return "}"; + case TokenType.braceBarR: + return "|}"; + case TokenType.parenL: + return "("; + case TokenType.parenR: + return ")"; + case TokenType.comma: + return ","; + case TokenType.semi: + return ";"; + case TokenType.colon: + return ":"; + case TokenType.doubleColon: + return "::"; + case TokenType.dot: + return "."; + case TokenType.question: + return "?"; + case TokenType.questionDot: + return "?."; + case TokenType.arrow: + return "=>"; + case TokenType.template: + return "template"; + case TokenType.ellipsis: + return "..."; + case TokenType.backQuote: + return "`"; + case TokenType.dollarBraceL: + return "${"; + case TokenType.at: + return "@"; + case TokenType.hash: + return "#"; + case TokenType.eq: + return "="; + case TokenType.assign: + return "_="; + case TokenType.preIncDec: + return "++/--"; + case TokenType.postIncDec: + return "++/--"; + case TokenType.bang: + return "!"; + case TokenType.tilde: + return "~"; + case TokenType.pipeline: + return "|>"; + case TokenType.nullishCoalescing: + return "??"; + case TokenType.logicalOR: + return "||"; + case TokenType.logicalAND: + return "&&"; + case TokenType.bitwiseOR: + return "|"; + case TokenType.bitwiseXOR: + return "^"; + case TokenType.bitwiseAND: + return "&"; + case TokenType.equality: + return "==/!="; + case TokenType.lessThan: + return "<"; + case TokenType.greaterThan: + return ">"; + case TokenType.relationalOrEqual: + return "<=/>="; + case TokenType.bitShift: + return "<>"; + case TokenType.plus: + return "+"; + case TokenType.minus: + return "-"; + case TokenType.modulo: + return "%"; + case TokenType.star: + return "*"; + case TokenType.slash: + return "/"; + case TokenType.exponent: + return "**"; + case TokenType.jsxName: + return "jsxName"; + case TokenType.jsxText: + return "jsxText"; + case TokenType.jsxTagStart: + return "jsxTagStart"; + case TokenType.jsxTagEnd: + return "jsxTagEnd"; + case TokenType.typeParameterStart: + return "typeParameterStart"; + case TokenType.nonNullAssertion: + return "nonNullAssertion"; + case TokenType._break: + return "break"; + case TokenType._case: + return "case"; + case TokenType._catch: + return "catch"; + case TokenType._continue: + return "continue"; + case TokenType._debugger: + return "debugger"; + case TokenType._default: + return "default"; + case TokenType._do: + return "do"; + case TokenType._else: + return "else"; + case TokenType._finally: + return "finally"; + case TokenType._for: + return "for"; + case TokenType._function: + return "function"; + case TokenType._if: + return "if"; + case TokenType._return: + return "return"; + case TokenType._switch: + return "switch"; + case 
TokenType._throw: + return "throw"; + case TokenType._try: + return "try"; + case TokenType._var: + return "var"; + case TokenType._let: + return "let"; + case TokenType._const: + return "const"; + case TokenType._while: + return "while"; + case TokenType._with: + return "with"; + case TokenType._new: + return "new"; + case TokenType._this: + return "this"; + case TokenType._super: + return "super"; + case TokenType._class: + return "class"; + case TokenType._extends: + return "extends"; + case TokenType._export: + return "export"; + case TokenType._import: + return "import"; + case TokenType._yield: + return "yield"; + case TokenType._null: + return "null"; + case TokenType._true: + return "true"; + case TokenType._false: + return "false"; + case TokenType._in: + return "in"; + case TokenType._instanceof: + return "instanceof"; + case TokenType._typeof: + return "typeof"; + case TokenType._void: + return "void"; + case TokenType._delete: + return "delete"; + case TokenType._async: + return "async"; + case TokenType._get: + return "get"; + case TokenType._set: + return "set"; + case TokenType._declare: + return "declare"; + case TokenType._readonly: + return "readonly"; + case TokenType._abstract: + return "abstract"; + case TokenType._static: + return "static"; + case TokenType._public: + return "public"; + case TokenType._private: + return "private"; + case TokenType._protected: + return "protected"; + case TokenType._as: + return "as"; + case TokenType._enum: + return "enum"; + case TokenType._type: + return "type"; + case TokenType._implements: + return "implements"; + default: + return ""; + } +} diff --git a/node_modules/sucrase/dist/parser/traverser/base.d.ts b/node_modules/sucrase/dist/parser/traverser/base.d.ts new file mode 100644 index 00000000..e15e8928 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/base.d.ts @@ -0,0 +1,16 @@ +import State from "../tokenizer/state"; +export declare let isJSXEnabled: boolean; +export declare let isTypeScriptEnabled: boolean; +export declare let isFlowEnabled: boolean; +export declare let state: State; +export declare let input: string; +export declare let nextContextId: number; +export declare function getNextContextId(): number; +export declare function augmentError(error: any): any; +export declare class Loc { + line: number; + column: number; + constructor(line: number, column: number); +} +export declare function locationForIndex(pos: number): Loc; +export declare function initParser(inputCode: string, isJSXEnabledArg: boolean, isTypeScriptEnabledArg: boolean, isFlowEnabledArg: boolean): void; diff --git a/node_modules/sucrase/dist/parser/traverser/base.js b/node_modules/sucrase/dist/parser/traverser/base.js new file mode 100644 index 00000000..dcf5696a --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/base.js @@ -0,0 +1,60 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _state = require('../tokenizer/state'); var _state2 = _interopRequireDefault(_state); +var _charcodes = require('../util/charcodes'); + + exports.isJSXEnabled; + exports.isTypeScriptEnabled; + exports.isFlowEnabled; + exports.state; + exports.input; + exports.nextContextId; + + function getNextContextId() { + return exports.nextContextId++; +} exports.getNextContextId = getNextContextId; + +// tslint:disable-next-line no-any + function augmentError(error) { + if ("pos" in error) { + const loc = locationForIndex(error.pos); + error.message += ` (${loc.line}:${loc.column})`; + error.loc = loc; + } + return error; +} exports.augmentError = augmentError; + + class Loc { + + + constructor(line, column) { + this.line = line; + this.column = column; + } +} exports.Loc = Loc; + + function locationForIndex(pos) { + let line = 1; + let column = 1; + for (let i = 0; i < pos; i++) { + if (exports.input.charCodeAt(i) === _charcodes.charCodes.lineFeed) { + line++; + column = 1; + } else { + column++; + } + } + return new Loc(line, column); +} exports.locationForIndex = locationForIndex; + + function initParser( + inputCode, + isJSXEnabledArg, + isTypeScriptEnabledArg, + isFlowEnabledArg, +) { + exports.input = inputCode; + exports.state = new (0, _state2.default)(); + exports.nextContextId = 1; + exports.isJSXEnabled = isJSXEnabledArg; + exports.isTypeScriptEnabled = isTypeScriptEnabledArg; + exports.isFlowEnabled = isFlowEnabledArg; +} exports.initParser = initParser; diff --git a/node_modules/sucrase/dist/parser/traverser/base.mjs b/node_modules/sucrase/dist/parser/traverser/base.mjs new file mode 100644 index 00000000..1dd0cffd --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/base.mjs @@ -0,0 +1,60 @@ +import State from "../tokenizer/state"; +import {charCodes} from "../util/charcodes"; + +export let isJSXEnabled; +export let isTypeScriptEnabled; +export let isFlowEnabled; +export let state; +export let input; +export let nextContextId; + +export function getNextContextId() { + return nextContextId++; +} + +// tslint:disable-next-line no-any +export function augmentError(error) { + if ("pos" in error) { + const loc = locationForIndex(error.pos); + error.message += ` (${loc.line}:${loc.column})`; + error.loc = loc; + } + return error; +} + +export class Loc { + + + constructor(line, column) { + this.line = line; + this.column = column; + } +} + +export function locationForIndex(pos) { + let line = 1; + let column = 1; + for (let i = 0; i < pos; i++) { + if (input.charCodeAt(i) === charCodes.lineFeed) { + line++; + column = 1; + } else { + column++; + } + } + return new Loc(line, column); +} + +export function initParser( + inputCode, + isJSXEnabledArg, + isTypeScriptEnabledArg, + isFlowEnabledArg, +) { + input = inputCode; + state = new State(); + nextContextId = 1; + isJSXEnabled = isJSXEnabledArg; + isTypeScriptEnabled = isTypeScriptEnabledArg; + isFlowEnabled = isFlowEnabledArg; +} diff --git a/node_modules/sucrase/dist/parser/traverser/expression.d.ts b/node_modules/sucrase/dist/parser/traverser/expression.d.ts new file mode 100644 index 00000000..a88e6a3b --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/expression.d.ts @@ -0,0 +1,34 @@ +export declare class StopState { + stop: boolean; + constructor(stop: boolean); +} +export declare function parseExpression(noIn?: boolean): void; +/** + * noIn is used when parsing a for loop so that we don't interpret a following "in" as the 
binary + * operatior. + * isWithinParens is used to indicate that we're parsing something that might be a comma expression + * or might be an arrow function or might be a Flow type assertion (which requires explicit parens). + * In these cases, we should allow : and ?: after the initial "left" part. + */ +export declare function parseMaybeAssign(noIn?: boolean, isWithinParens?: boolean): boolean; +export declare function baseParseMaybeAssign(noIn: boolean, isWithinParens: boolean): boolean; +export declare function baseParseConditional(noIn: boolean): void; +export declare function parseMaybeUnary(): boolean; +export declare function parseExprSubscripts(): boolean; +export declare function baseParseSubscripts(startPos: number, noCalls?: boolean): void; +/** Set 'state.stop = true' to indicate that we should stop parsing subscripts. */ +export declare function baseParseSubscript(startPos: number, noCalls: boolean, stopState: StopState): void; +export declare function atPossibleAsync(): boolean; +export declare function parseCallExpressionArguments(): void; +export declare function parseExprAtom(): boolean; +export declare function parseLiteral(): void; +export declare function parseParenExpression(): void; +export declare function parseArrow(): boolean; +export declare function parseTemplate(): void; +export declare function parseObj(isPattern: boolean, isBlockScope: boolean): void; +export declare function parsePropertyName(objectContextId: number): void; +export declare function parseMethod(functionStart: number, isConstructor: boolean): void; +export declare function parseArrowExpression(startTokenIndex: number): void; +export declare function parseFunctionBodyAndFinish(functionStart: number, funcContextId?: number): void; +export declare function parseFunctionBody(allowExpression: boolean, funcContextId?: number): void; +export declare function parseIdentifier(): void; diff --git a/node_modules/sucrase/dist/parser/traverser/expression.js b/node_modules/sucrase/dist/parser/traverser/expression.js new file mode 100644 index 00000000..5b687753 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/expression.js @@ -0,0 +1,950 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});/* eslint max-len: 0 */ + +// A recursive descent parser operates by defining functions for all +// syntactic elements, and recursively calling those, each function +// advancing the input stream and returning an AST node. Precedence +// of constructs (for example, the fact that `!x[1]` means `!(x[1])` +// instead of `(!x)[1]` is handled by the fact that the parser +// function that parses unary prefix operators is called first, and +// in turn calls the function that parses `[]` subscripts — that +// way, it'll receive the node for `x[1]` already parsed, and wraps +// *that* in the unary operator node. +// +// Acorn uses an [operator precedence parser][opp] to handle binary +// operator precedence, because it is much more compact than using +// the technique outlined above, which uses different, nesting +// functions to specify precedence, for all of the ten binary +// precedence levels that JavaScript defines. 
+// +// [opp]: http://en.wikipedia.org/wiki/Operator-precedence_parser + + + + + + + + + + + +var _flow = require('../plugins/flow'); +var _index = require('../plugins/jsx/index'); +var _types = require('../plugins/types'); + + + + + + + + + + +var _typescript = require('../plugins/typescript'); + + + + + + + + + + +var _index3 = require('../tokenizer/index'); +var _keywords = require('../tokenizer/keywords'); +var _state = require('../tokenizer/state'); +var _types3 = require('../tokenizer/types'); +var _base = require('./base'); + + + + + + +var _lval = require('./lval'); + + + + + + +var _statement = require('./statement'); + + + + + + + +var _util = require('./util'); + + class StopState { + + constructor(stop) { + this.stop = stop; + } +} exports.StopState = StopState; + +// ### Expression parsing + +// These nest, from the most general expression type at the top to +// 'atomic', nondivisible expression types at the bottom. Most of +// the functions will simply let the function (s) below them parse, +// and, *if* the syntactic construct they handle is present, wrap +// the AST node that the inner parser gave them in another node. + function parseExpression(noIn = false) { + parseMaybeAssign(noIn); + if (_index3.match.call(void 0, _types3.TokenType.comma)) { + while (_index3.eat.call(void 0, _types3.TokenType.comma)) { + parseMaybeAssign(noIn); + } + } +} exports.parseExpression = parseExpression; + +/** + * noIn is used when parsing a for loop so that we don't interpret a following "in" as the binary + * operatior. + * isWithinParens is used to indicate that we're parsing something that might be a comma expression + * or might be an arrow function or might be a Flow type assertion (which requires explicit parens). + * In these cases, we should allow : and ?: after the initial "left" part. + */ + function parseMaybeAssign(noIn = false, isWithinParens = false) { + if (_base.isTypeScriptEnabled) { + return _typescript.tsParseMaybeAssign.call(void 0, noIn, isWithinParens); + } else if (_base.isFlowEnabled) { + return _flow.flowParseMaybeAssign.call(void 0, noIn, isWithinParens); + } else { + return baseParseMaybeAssign(noIn, isWithinParens); + } +} exports.parseMaybeAssign = parseMaybeAssign; + +// Parse an assignment expression. This includes applications of +// operators like `+=`. +// Returns true if the expression was an arrow function. + function baseParseMaybeAssign(noIn, isWithinParens) { + if (_index3.match.call(void 0, _types3.TokenType._yield)) { + parseYield(); + return false; + } + + if (_index3.match.call(void 0, _types3.TokenType.parenL) || _index3.match.call(void 0, _types3.TokenType.name) || _index3.match.call(void 0, _types3.TokenType._yield)) { + _base.state.potentialArrowAt = _base.state.start; + } + + const wasArrow = parseMaybeConditional(noIn); + if (isWithinParens) { + parseParenItem(); + } + if (_base.state.type & _types3.TokenType.IS_ASSIGN) { + _index3.next.call(void 0, ); + parseMaybeAssign(noIn); + return false; + } + return wasArrow; +} exports.baseParseMaybeAssign = baseParseMaybeAssign; + +// Parse a ternary conditional (`?:`) operator. +// Returns true if the expression was an arrow function. 
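
The noIn flag documented above only matters in the head of a for statement. A small editorial example (not part of this diff) of the two roles of `in` that it keeps apart:

    // Editorial example, not vendored code.
    const obj = { a: 1 };
    for (const key in obj) {        // here `in` belongs to the for-in head: noIn is in effect
      console.log(key);             // "a"
    }
    console.log("a" in obj);        // here `in` is the ordinary binary operator (prec 8): true
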
+function parseMaybeConditional(noIn) { + const wasArrow = parseExprOps(noIn); + if (wasArrow) { + return true; + } + parseConditional(noIn); + return false; +} + +function parseConditional(noIn) { + if (_base.isTypeScriptEnabled || _base.isFlowEnabled) { + _types.typedParseConditional.call(void 0, noIn); + } else { + baseParseConditional(noIn); + } +} + + function baseParseConditional(noIn) { + if (_index3.eat.call(void 0, _types3.TokenType.question)) { + parseMaybeAssign(); + _util.expect.call(void 0, _types3.TokenType.colon); + parseMaybeAssign(noIn); + } +} exports.baseParseConditional = baseParseConditional; + +// Start the precedence parser. +// Returns true if this was an arrow function +function parseExprOps(noIn) { + const wasArrow = parseMaybeUnary(); + if (wasArrow) { + return true; + } + parseExprOp(-1, noIn); + return false; +} + +// Parse binary operators with the operator precedence parsing +// algorithm. `left` is the left-hand side of the operator. +// `minPrec` provides context that allows the function to stop and +// defer further parser to one of its callers when it encounters an +// operator that has a lower precedence than the set it is parsing. +function parseExprOp(minPrec, noIn) { + if ( + _base.isTypeScriptEnabled && + (_types3.TokenType._in & _types3.TokenType.PRECEDENCE_MASK) > minPrec && + !_util.hasPrecedingLineBreak.call(void 0, ) && + _util.eatContextual.call(void 0, _keywords.ContextualKeyword._as) + ) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types3.TokenType._as; + const oldIsType = _index3.pushTypeContext.call(void 0, 1); + _typescript.tsParseType.call(void 0, ); + _index3.popTypeContext.call(void 0, oldIsType); + parseExprOp(minPrec, noIn); + return; + } + + const prec = _base.state.type & _types3.TokenType.PRECEDENCE_MASK; + if (prec > 0 && (!noIn || !_index3.match.call(void 0, _types3.TokenType._in))) { + if (prec > minPrec) { + const op = _base.state.type; + _index3.next.call(void 0, ); + + parseMaybeUnary(); + parseExprOp(op & _types3.TokenType.IS_RIGHT_ASSOCIATIVE ? prec - 1 : prec, noIn); + parseExprOp(minPrec, noIn); + } + } +} + +// Parse unary operators, both prefix and postfix. +// Returns true if this was an arrow function. + function parseMaybeUnary() { + if (_base.isTypeScriptEnabled && !_base.isJSXEnabled && _index3.eat.call(void 0, _types3.TokenType.lessThan)) { + _typescript.tsParseTypeAssertion.call(void 0, ); + return false; + } + + if (_base.state.type & _types3.TokenType.IS_PREFIX) { + _index3.next.call(void 0, ); + parseMaybeUnary(); + return false; + } + + const wasArrow = parseExprSubscripts(); + if (wasArrow) { + return true; + } + while (_base.state.type & _types3.TokenType.IS_POSTFIX && !_util.canInsertSemicolon.call(void 0, )) { + // The tokenizer calls everything a preincrement, so make it a postincrement when + // we see it in that context. + if (_base.state.type === _types3.TokenType.preIncDec) { + _base.state.type = _types3.TokenType.postIncDec; + } + _index3.next.call(void 0, ); + } + return false; +} exports.parseMaybeUnary = parseMaybeUnary; + +// Parse call, dot, and `[]`-subscript expressions. +// Returns true if this was an arrow function. 
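
parseExprOp above is the classic precedence-climbing loop. The following is a self-contained editorial toy (not sucrase code) that mirrors its `prec > minPrec` test and its `IS_RIGHT_ASSOCIATIVE ? prec - 1 : prec` recursion, so the shape is easier to see; sucrase's version only walks and annotates tokens, while the toy builds nested arrays to make associativity visible:

    // Editorial toy, not vendored code. Precedence values chosen to match the
    // prec:10/11/12 comments in the token table above.
    const PREC = { "+": 10, "*": 11, "**": 12 };
    const RIGHT_ASSOC = new Set(["**"]);

    function parseToy(tokens) {
      let pos = 0;
      function parseAtom() { return tokens[pos++]; }   // numbers only, for brevity
      function parseOps(left, minPrec) {
        while (pos < tokens.length && PREC[tokens[pos]] > minPrec) {
          const op = tokens[pos++];
          const prec = PREC[op];
          let right = parseAtom();
          // Right-associative operators let the right side absorb operators of the
          // same precedence, mirroring `IS_RIGHT_ASSOCIATIVE ? prec - 1 : prec`.
          right = parseOps(right, RIGHT_ASSOC.has(op) ? prec - 1 : prec);
          left = [op, left, right];
        }
        return left;
      }
      return parseOps(parseAtom(), 0);
    }

    console.log(parseToy(["2", "**", "3", "**", "2"]));  // ["**","2",["**","3","2"]]
    console.log(parseToy(["1", "+", "2", "*", "3"]));    // ["+","1",["*","2","3"]]
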
+ function parseExprSubscripts() { + const startPos = _base.state.start; + const wasArrow = parseExprAtom(); + if (wasArrow) { + return true; + } + parseSubscripts(startPos); + return false; +} exports.parseExprSubscripts = parseExprSubscripts; + +function parseSubscripts(startPos, noCalls = false) { + if (_base.isFlowEnabled) { + _flow.flowParseSubscripts.call(void 0, startPos, noCalls); + } else { + baseParseSubscripts(startPos, noCalls); + } +} + + function baseParseSubscripts(startPos, noCalls = false) { + const stopState = new StopState(false); + do { + parseSubscript(startPos, noCalls, stopState); + } while (!stopState.stop && !_base.state.error); +} exports.baseParseSubscripts = baseParseSubscripts; + +function parseSubscript(startPos, noCalls, stopState) { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseSubscript.call(void 0, startPos, noCalls, stopState); + } else if (_base.isFlowEnabled) { + _flow.flowParseSubscript.call(void 0, startPos, noCalls, stopState); + } else { + baseParseSubscript(startPos, noCalls, stopState); + } +} + +/** Set 'state.stop = true' to indicate that we should stop parsing subscripts. */ + function baseParseSubscript(startPos, noCalls, stopState) { + if (!noCalls && _index3.eat.call(void 0, _types3.TokenType.doubleColon)) { + parseNoCallExpr(); + stopState.stop = true; + parseSubscripts(startPos, noCalls); + } else if (_index3.match.call(void 0, _types3.TokenType.questionDot)) { + if (noCalls && _index3.lookaheadType.call(void 0, ) === _types3.TokenType.parenL) { + stopState.stop = true; + return; + } + _index3.next.call(void 0, ); + + if (_index3.eat.call(void 0, _types3.TokenType.bracketL)) { + parseExpression(); + _util.expect.call(void 0, _types3.TokenType.bracketR); + } else if (_index3.eat.call(void 0, _types3.TokenType.parenL)) { + parseCallExpressionArguments(); + } else { + parseIdentifier(); + } + } else if (_index3.eat.call(void 0, _types3.TokenType.dot)) { + parseMaybePrivateName(); + } else if (_index3.eat.call(void 0, _types3.TokenType.bracketL)) { + parseExpression(); + _util.expect.call(void 0, _types3.TokenType.bracketR); + } else if (!noCalls && _index3.match.call(void 0, _types3.TokenType.parenL)) { + if (atPossibleAsync()) { + // We see "async", but it's possible it's a usage of the name "async". Parse as if it's a + // function call, and if we see an arrow later, backtrack and re-parse as a parameter list. + const snapshot = _base.state.snapshot(); + const startTokenIndex = _base.state.tokens.length; + _index3.next.call(void 0, ); + + const callContextId = _base.getNextContextId.call(void 0, ); + + _base.state.tokens[_base.state.tokens.length - 1].contextId = callContextId; + parseCallExpressionArguments(); + _base.state.tokens[_base.state.tokens.length - 1].contextId = callContextId; + + if (shouldParseAsyncArrow()) { + // We hit an arrow, so backtrack and start again parsing function parameters. + _base.state.restoreFromSnapshot(snapshot); + stopState.stop = true; + _base.state.scopeDepth++; + + _statement.parseFunctionParams.call(void 0, ); + parseAsyncArrowFromCallExpression(startPos, startTokenIndex); + } + } else { + _index3.next.call(void 0, ); + const callContextId = _base.getNextContextId.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].contextId = callContextId; + parseCallExpressionArguments(); + _base.state.tokens[_base.state.tokens.length - 1].contextId = callContextId; + } + } else if (_index3.match.call(void 0, _types3.TokenType.backQuote)) { + // Tagged template expression. 
+ parseTemplate(); + } else { + stopState.stop = true; + } +} exports.baseParseSubscript = baseParseSubscript; + + function atPossibleAsync() { + // This was made less strict than the original version to avoid passing around nodes, but it + // should be safe to have rare false positives here. + return ( + _base.state.tokens[_base.state.tokens.length - 1].contextualKeyword === _keywords.ContextualKeyword._async && + !_util.canInsertSemicolon.call(void 0, ) + ); +} exports.atPossibleAsync = atPossibleAsync; + + function parseCallExpressionArguments() { + let first = true; + while (!_index3.eat.call(void 0, _types3.TokenType.parenR) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types3.TokenType.comma); + if (_index3.eat.call(void 0, _types3.TokenType.parenR)) { + break; + } + } + + parseExprListItem(false); + } +} exports.parseCallExpressionArguments = parseCallExpressionArguments; + +function shouldParseAsyncArrow() { + return _index3.match.call(void 0, _types3.TokenType.colon) || _index3.match.call(void 0, _types3.TokenType.arrow); +} + +function parseAsyncArrowFromCallExpression(functionStart, startTokenIndex) { + if (_base.isTypeScriptEnabled) { + _typescript.tsStartParseAsyncArrowFromCallExpression.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowStartParseAsyncArrowFromCallExpression.call(void 0, ); + } + _util.expect.call(void 0, _types3.TokenType.arrow); + parseArrowExpression(startTokenIndex); +} + +// Parse a no-call expression (like argument of `new` or `::` operators). + +function parseNoCallExpr() { + const startPos = _base.state.start; + parseExprAtom(); + parseSubscripts(startPos, true); +} + +// Parse an atomic expression — either a single token that is an +// expression, an expression started by a keyword like `function` or +// `new`, or an expression wrapped in punctuation like `()`, `[]`, +// or `{}`. +// Returns true if the parsed expression was an arrow function. + function parseExprAtom() { + if (_index3.match.call(void 0, _types3.TokenType.jsxText)) { + parseLiteral(); + return false; + } else if (_index3.match.call(void 0, _types3.TokenType.lessThan) && _base.isJSXEnabled) { + _base.state.type = _types3.TokenType.jsxTagStart; + _index.jsxParseElement.call(void 0, ); + _index3.next.call(void 0, ); + return false; + } + + const canBeArrow = _base.state.potentialArrowAt === _base.state.start; + switch (_base.state.type) { + case _types3.TokenType.slash: + case _types3.TokenType.assign: + _index3.retokenizeSlashAsRegex.call(void 0, ); + // Fall through. 
+ + case _types3.TokenType._super: + case _types3.TokenType._this: + case _types3.TokenType.regexp: + case _types3.TokenType.num: + case _types3.TokenType.bigint: + case _types3.TokenType.string: + case _types3.TokenType._null: + case _types3.TokenType._true: + case _types3.TokenType._false: + _index3.next.call(void 0, ); + return false; + + case _types3.TokenType._import: + if (_index3.lookaheadType.call(void 0, ) === _types3.TokenType.dot) { + parseImportMetaProperty(); + return false; + } + _index3.next.call(void 0, ); + return false; + + case _types3.TokenType.name: { + const startTokenIndex = _base.state.tokens.length; + const functionStart = _base.state.start; + const contextualKeyword = _base.state.contextualKeyword; + parseIdentifier(); + if (contextualKeyword === _keywords.ContextualKeyword._await) { + parseAwait(); + return false; + } else if ( + contextualKeyword === _keywords.ContextualKeyword._async && + _index3.match.call(void 0, _types3.TokenType._function) && + !_util.canInsertSemicolon.call(void 0, ) + ) { + _index3.next.call(void 0, ); + _statement.parseFunction.call(void 0, functionStart, false); + return false; + } else if ( + canBeArrow && + !_util.canInsertSemicolon.call(void 0, ) && + contextualKeyword === _keywords.ContextualKeyword._async && + _index3.match.call(void 0, _types3.TokenType.name) + ) { + _base.state.scopeDepth++; + _lval.parseBindingIdentifier.call(void 0, false); + _util.expect.call(void 0, _types3.TokenType.arrow); + // let foo = async bar => {}; + parseArrowExpression(startTokenIndex); + return true; + } + + if (canBeArrow && !_util.canInsertSemicolon.call(void 0, ) && _index3.match.call(void 0, _types3.TokenType.arrow)) { + _base.state.scopeDepth++; + _lval.markPriorBindingIdentifier.call(void 0, false); + _util.expect.call(void 0, _types3.TokenType.arrow); + parseArrowExpression(startTokenIndex); + return true; + } + + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index3.IdentifierRole.Access; + return false; + } + + case _types3.TokenType._do: { + _index3.next.call(void 0, ); + _statement.parseBlock.call(void 0, false); + return false; + } + + case _types3.TokenType.parenL: { + const wasArrow = parseParenAndDistinguishExpression(canBeArrow); + return wasArrow; + } + + case _types3.TokenType.bracketL: + _index3.next.call(void 0, ); + parseExprList(_types3.TokenType.bracketR, true); + return false; + + case _types3.TokenType.braceL: + parseObj(false, false); + return false; + + case _types3.TokenType._function: + parseFunctionExpression(); + return false; + + case _types3.TokenType.at: + _statement.parseDecorators.call(void 0, ); + // Fall through. + + case _types3.TokenType._class: + _statement.parseClass.call(void 0, false); + return false; + + case _types3.TokenType._new: + parseNew(); + return false; + + case _types3.TokenType.backQuote: + parseTemplate(); + return false; + + case _types3.TokenType.doubleColon: { + _index3.next.call(void 0, ); + parseNoCallExpr(); + return false; + } + + case _types3.TokenType.hash: { + // Smart pipeline topic reference. 
+ _index3.next.call(void 0, ); + return false; + } + + default: + _util.unexpected.call(void 0, ); + return false; + } +} exports.parseExprAtom = parseExprAtom; + +function parseMaybePrivateName() { + _index3.eat.call(void 0, _types3.TokenType.hash); + parseIdentifier(); +} + +function parseFunctionExpression() { + const functionStart = _base.state.start; + parseIdentifier(); + if (_index3.eat.call(void 0, _types3.TokenType.dot)) { + // function.sent + parseMetaProperty(); + } + _statement.parseFunction.call(void 0, functionStart, false); +} + +function parseMetaProperty() { + parseIdentifier(); +} + +function parseImportMetaProperty() { + parseIdentifier(); + _util.expect.call(void 0, _types3.TokenType.dot); + // import.meta + parseMetaProperty(); +} + + function parseLiteral() { + _index3.next.call(void 0, ); +} exports.parseLiteral = parseLiteral; + + function parseParenExpression() { + _util.expect.call(void 0, _types3.TokenType.parenL); + parseExpression(); + _util.expect.call(void 0, _types3.TokenType.parenR); +} exports.parseParenExpression = parseParenExpression; + +// Returns true if this was an arrow expression. +function parseParenAndDistinguishExpression(canBeArrow) { + // Assume this is a normal parenthesized expression, but if we see an arrow, we'll bail and + // start over as a parameter list. + const snapshot = _base.state.snapshot(); + + const startTokenIndex = _base.state.tokens.length; + _util.expect.call(void 0, _types3.TokenType.parenL); + + let first = true; + + while (!_index3.match.call(void 0, _types3.TokenType.parenR) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types3.TokenType.comma); + if (_index3.match.call(void 0, _types3.TokenType.parenR)) { + break; + } + } + + if (_index3.match.call(void 0, _types3.TokenType.ellipsis)) { + _lval.parseRest.call(void 0, false /* isBlockScope */); + parseParenItem(); + break; + } else { + parseMaybeAssign(false, true); + } + } + + _util.expect.call(void 0, _types3.TokenType.parenR); + + if (canBeArrow && shouldParseArrow()) { + const wasArrow = parseArrow(); + if (wasArrow) { + // It was an arrow function this whole time, so start over and parse it as params so that we + // get proper token annotations. + _base.state.restoreFromSnapshot(snapshot); + _base.state.scopeDepth++; + // Don't specify a context ID because arrow functions don't need a context ID. + _statement.parseFunctionParams.call(void 0, ); + parseArrow(); + parseArrowExpression(startTokenIndex); + return true; + } + } + + return false; +} + +function shouldParseArrow() { + return _index3.match.call(void 0, _types3.TokenType.colon) || !_util.canInsertSemicolon.call(void 0, ); +} + +// Returns whether there was an arrow token. + function parseArrow() { + if (_base.isTypeScriptEnabled) { + return _typescript.tsParseArrow.call(void 0, ); + } else if (_base.isFlowEnabled) { + return _flow.flowParseArrow.call(void 0, ); + } else { + return _index3.eat.call(void 0, _types3.TokenType.arrow); + } +} exports.parseArrow = parseArrow; + +function parseParenItem() { + if (_base.isTypeScriptEnabled || _base.isFlowEnabled) { + _types.typedParseParenItem.call(void 0, ); + } +} + +// New's precedence is slightly tricky. It must allow its argument to +// be a `[]` or dot subscript expression, but not a call — at least, +// not without wrapping it in parentheses. Thus, it uses the noCalls +// argument to parseSubscripts to prevent it from consuming the +// argument list. 
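
A short editorial illustration (not part of this diff) of the distinction the comment above describes: `new` may take dot and bracket subscripts in its target, but a call only reaches the target through explicit parentheses:

    // Editorial example, not vendored code.
    function makeCtor() {
      return class Box { constructor(v) { this.value = v; } };
    }
    const registry = { Box: makeCtor() };

    const a = new registry.Box(1);     // dot subscript is consumed as part of the `new` target
    const b = new (makeCtor())(2);     // a call inside the target needs its own parentheses
    console.log(a.value, b.value);     // 1 2
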
+function parseNew() { + _util.expect.call(void 0, _types3.TokenType._new); + if (_index3.eat.call(void 0, _types3.TokenType.dot)) { + // new.target + parseMetaProperty(); + return; + } + parseNoCallExpr(); + _index3.eat.call(void 0, _types3.TokenType.questionDot); + parseNewArguments(); +} + +function parseNewArguments() { + if (_base.isTypeScriptEnabled) { + _typescript.tsStartParseNewArguments.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowStartParseNewArguments.call(void 0, ); + } + if (_index3.eat.call(void 0, _types3.TokenType.parenL)) { + parseExprList(_types3.TokenType.parenR); + } +} + + function parseTemplate() { + // Finish `, read quasi + _index3.nextTemplateToken.call(void 0, ); + // Finish quasi, read ${ + _index3.nextTemplateToken.call(void 0, ); + while (!_index3.match.call(void 0, _types3.TokenType.backQuote) && !_base.state.error) { + _util.expect.call(void 0, _types3.TokenType.dollarBraceL); + parseExpression(); + // Finish }, read quasi + _index3.nextTemplateToken.call(void 0, ); + // Finish quasi, read either ${ or ` + _index3.nextTemplateToken.call(void 0, ); + } + _index3.next.call(void 0, ); +} exports.parseTemplate = parseTemplate; + +// Parse an object literal or binding pattern. + function parseObj(isPattern, isBlockScope) { + // Attach a context ID to the object open and close brace and each object key. + const contextId = _base.getNextContextId.call(void 0, ); + let first = true; + + _index3.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; + + while (!_index3.eat.call(void 0, _types3.TokenType.braceR) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types3.TokenType.comma); + if (_index3.eat.call(void 0, _types3.TokenType.braceR)) { + break; + } + } + + let isGenerator = false; + if (_index3.match.call(void 0, _types3.TokenType.ellipsis)) { + const previousIndex = _base.state.tokens.length; + _lval.parseSpread.call(void 0, ); + if (isPattern) { + // Mark role when the only thing being spread over is an identifier. + if (_base.state.tokens.length === previousIndex + 2) { + _lval.markPriorBindingIdentifier.call(void 0, isBlockScope); + } + if (_index3.eat.call(void 0, _types3.TokenType.braceR)) { + break; + } + } + continue; + } + + if (!isPattern) { + isGenerator = _index3.eat.call(void 0, _types3.TokenType.star); + } + + if (!isPattern && _util.isContextual.call(void 0, _keywords.ContextualKeyword._async)) { + if (isGenerator) _util.unexpected.call(void 0, ); + + parseIdentifier(); + if ( + _index3.match.call(void 0, _types3.TokenType.colon) || + _index3.match.call(void 0, _types3.TokenType.parenL) || + _index3.match.call(void 0, _types3.TokenType.braceR) || + _index3.match.call(void 0, _types3.TokenType.eq) || + _index3.match.call(void 0, _types3.TokenType.comma) + ) { + // This is a key called "async" rather than an async function. + } else { + if (_index3.match.call(void 0, _types3.TokenType.star)) { + _index3.next.call(void 0, ); + isGenerator = true; + } + parsePropertyName(contextId); + } + } else { + parsePropertyName(contextId); + } + + parseObjPropValue(isPattern, isBlockScope, contextId); + } + + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; +} exports.parseObj = parseObj; + +function isGetterOrSetterMethod(isPattern) { + // We go off of the next and don't bother checking if the node key is actually "get" or "set". + // This lets us avoid generating a node, and should only make the validation worse. 
+ return ( + !isPattern && + (_index3.match.call(void 0, _types3.TokenType.string) || // get "string"() {} + _index3.match.call(void 0, _types3.TokenType.num) || // get 1() {} + _index3.match.call(void 0, _types3.TokenType.bracketL) || // get ["string"]() {} + _index3.match.call(void 0, _types3.TokenType.name) || // get foo() {} + !!(_base.state.type & _types3.TokenType.IS_KEYWORD)) // get debugger() {} + ); +} + +// Returns true if this was a method. +function parseObjectMethod(isPattern, objectContextId) { + // We don't need to worry about modifiers because object methods can't have optional bodies, so + // the start will never be used. + const functionStart = _base.state.start; + if (_index3.match.call(void 0, _types3.TokenType.parenL)) { + if (isPattern) _util.unexpected.call(void 0, ); + parseMethod(functionStart, /* isConstructor */ false); + return true; + } + + if (isGetterOrSetterMethod(isPattern)) { + parsePropertyName(objectContextId); + parseMethod(functionStart, /* isConstructor */ false); + return true; + } + return false; +} + +function parseObjectProperty(isPattern, isBlockScope) { + if (_index3.eat.call(void 0, _types3.TokenType.colon)) { + if (isPattern) { + _lval.parseMaybeDefault.call(void 0, isBlockScope); + } else { + parseMaybeAssign(false); + } + return; + } + + // Since there's no colon, we assume this is an object shorthand. + + // If we're in a destructuring, we've now discovered that the key was actually an assignee, so + // we need to tag it as a declaration with the appropriate scope. Otherwise, we might need to + // transform it on access, so mark it as a normal object shorthand. + if (isPattern) { + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = isBlockScope + ? _index3.IdentifierRole.ObjectShorthandBlockScopedDeclaration + : _index3.IdentifierRole.ObjectShorthandFunctionScopedDeclaration; + } else { + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index3.IdentifierRole.ObjectShorthand; + } + + // Regardless of whether we know this to be a pattern or if we're in an ambiguous context, allow + // parsing as if there's a default value. + _lval.parseMaybeDefault.call(void 0, isBlockScope, true); +} + +function parseObjPropValue( + isPattern, + isBlockScope, + objectContextId, +) { + if (_base.isTypeScriptEnabled) { + _typescript.tsStartParseObjPropValue.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowStartParseObjPropValue.call(void 0, ); + } + const wasMethod = parseObjectMethod(isPattern, objectContextId); + if (!wasMethod) { + parseObjectProperty(isPattern, isBlockScope); + } +} + + function parsePropertyName(objectContextId) { + if (_base.isFlowEnabled) { + _flow.flowParseVariance.call(void 0, ); + } + if (_index3.eat.call(void 0, _types3.TokenType.bracketL)) { + _base.state.tokens[_base.state.tokens.length - 1].contextId = objectContextId; + parseMaybeAssign(); + _util.expect.call(void 0, _types3.TokenType.bracketR); + _base.state.tokens[_base.state.tokens.length - 1].contextId = objectContextId; + } else { + if (_index3.match.call(void 0, _types3.TokenType.num) || _index3.match.call(void 0, _types3.TokenType.string)) { + parseExprAtom(); + } else { + parseMaybePrivateName(); + } + + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index3.IdentifierRole.ObjectKey; + _base.state.tokens[_base.state.tokens.length - 1].contextId = objectContextId; + } +} exports.parsePropertyName = parsePropertyName; + +// Parse object or class method. 
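
The inline comments in isGetterOrSetterMethod above enumerate the accessor key shapes it accepts; an editorial example (not part of this diff) with all of them in one object literal:

    // Editorial example, not vendored code.
    const key = "qux";
    const o = {
      get foo() { return 1; },           // get foo() {}         (name)
      get "two words"() { return 2; },   // get "string"() {}
      get 1() { return 3; },             // get 1() {}           (num)
      get [key]() { return 4; },         // get ["string"]() {}  (computed, bracketL)
      get default() { return 5; },       // keyword as a key, like `get debugger() {}`
    };
    console.log(o.foo, o["two words"], o[1], o.qux, o.default);  // 1 2 3 4 5
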
+ function parseMethod(functionStart, isConstructor) { + const funcContextId = _base.getNextContextId.call(void 0, ); + + _base.state.scopeDepth++; + const startTokenIndex = _base.state.tokens.length; + const allowModifiers = isConstructor; // For TypeScript parameter properties + _statement.parseFunctionParams.call(void 0, allowModifiers, funcContextId); + parseFunctionBodyAndFinish(functionStart, funcContextId); + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, true)); + _base.state.scopeDepth--; +} exports.parseMethod = parseMethod; + +// Parse arrow function expression. +// If the parameters are provided, they will be converted to an +// assignable list. + function parseArrowExpression(startTokenIndex) { + parseFunctionBody(true); + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, true)); + _base.state.scopeDepth--; +} exports.parseArrowExpression = parseArrowExpression; + + function parseFunctionBodyAndFinish(functionStart, funcContextId = 0) { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseFunctionBodyAndFinish.call(void 0, functionStart, funcContextId); + } else if (_base.isFlowEnabled) { + _flow.flowParseFunctionBodyAndFinish.call(void 0, funcContextId); + } else { + parseFunctionBody(false, funcContextId); + } +} exports.parseFunctionBodyAndFinish = parseFunctionBodyAndFinish; + + function parseFunctionBody(allowExpression, funcContextId = 0) { + const isExpression = allowExpression && !_index3.match.call(void 0, _types3.TokenType.braceL); + + if (isExpression) { + parseMaybeAssign(); + } else { + _statement.parseBlock.call(void 0, true /* allowDirectives */, true /* isFunctionScope */, funcContextId); + } +} exports.parseFunctionBody = parseFunctionBody; + +// Parses a comma-separated list of expressions, and returns them as +// an array. `close` is the token type that ends the list, and +// `allowEmpty` can be turned on to allow subsequent commas with +// nothing in between them to be parsed as `null` (which is needed +// for array literals). + +function parseExprList(close, allowEmpty = false) { + let first = true; + while (!_index3.eat.call(void 0, close) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types3.TokenType.comma); + if (_index3.eat.call(void 0, close)) break; + } + parseExprListItem(allowEmpty); + } +} + +function parseExprListItem(allowEmpty) { + if (allowEmpty && _index3.match.call(void 0, _types3.TokenType.comma)) { + // Empty item; nothing more to parse for this item. + } else if (_index3.match.call(void 0, _types3.TokenType.ellipsis)) { + _lval.parseSpread.call(void 0, ); + parseParenItem(); + } else if (_index3.match.call(void 0, _types3.TokenType.question)) { + // Partial function application proposal. + _index3.next.call(void 0, ); + } else { + parseMaybeAssign(false, true); + } +} + +// Parse the next token as an identifier. + function parseIdentifier() { + _index3.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types3.TokenType.name; +} exports.parseIdentifier = parseIdentifier; + +// Parses await expression inside async function. +function parseAwait() { + parseMaybeUnary(); +} + +// Parses yield expression inside generator. 
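
parseExprList's allowEmpty flag, described above, exists for array-literal elisions; a tiny editorial example (not part of this diff):

    // Editorial example, not vendored code.
    const arr = [1, , 3];               // the empty slot between commas is the allowEmpty case
    console.log(arr.length, 1 in arr);  // 3 false  (index 1 is a hole, not an assigned undefined)
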
+function parseYield() { + _index3.next.call(void 0, ); + if (!_index3.match.call(void 0, _types3.TokenType.semi) && !_util.canInsertSemicolon.call(void 0, )) { + _index3.eat.call(void 0, _types3.TokenType.star); + parseMaybeAssign(); + } +} diff --git a/node_modules/sucrase/dist/parser/traverser/expression.mjs b/node_modules/sucrase/dist/parser/traverser/expression.mjs new file mode 100644 index 00000000..67931292 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/expression.mjs @@ -0,0 +1,950 @@ +/* eslint max-len: 0 */ + +// A recursive descent parser operates by defining functions for all +// syntactic elements, and recursively calling those, each function +// advancing the input stream and returning an AST node. Precedence +// of constructs (for example, the fact that `!x[1]` means `!(x[1])` +// instead of `(!x)[1]` is handled by the fact that the parser +// function that parses unary prefix operators is called first, and +// in turn calls the function that parses `[]` subscripts — that +// way, it'll receive the node for `x[1]` already parsed, and wraps +// *that* in the unary operator node. +// +// Acorn uses an [operator precedence parser][opp] to handle binary +// operator precedence, because it is much more compact than using +// the technique outlined above, which uses different, nesting +// functions to specify precedence, for all of the ten binary +// precedence levels that JavaScript defines. +// +// [opp]: http://en.wikipedia.org/wiki/Operator-precedence_parser + +import { + flowParseArrow, + flowParseFunctionBodyAndFinish, + flowParseMaybeAssign, + flowParseSubscript, + flowParseSubscripts, + flowParseVariance, + flowStartParseAsyncArrowFromCallExpression, + flowStartParseNewArguments, + flowStartParseObjPropValue, +} from "../plugins/flow"; +import {jsxParseElement} from "../plugins/jsx/index"; +import {typedParseConditional, typedParseParenItem} from "../plugins/types"; +import { + tsParseArrow, + tsParseFunctionBodyAndFinish, + tsParseMaybeAssign, + tsParseSubscript, + tsParseType, + tsParseTypeAssertion, + tsStartParseAsyncArrowFromCallExpression, + tsStartParseNewArguments, + tsStartParseObjPropValue, +} from "../plugins/typescript"; +import { + eat, + IdentifierRole, + lookaheadType, + match, + next, + nextTemplateToken, + popTypeContext, + pushTypeContext, + retokenizeSlashAsRegex, +} from "../tokenizer/index"; +import {ContextualKeyword} from "../tokenizer/keywords"; +import {Scope} from "../tokenizer/state"; +import {TokenType, TokenType as tt} from "../tokenizer/types"; +import {getNextContextId, isFlowEnabled, isJSXEnabled, isTypeScriptEnabled, state} from "./base"; +import { + markPriorBindingIdentifier, + parseBindingIdentifier, + parseMaybeDefault, + parseRest, + parseSpread, +} from "./lval"; +import { + parseBlock, + parseClass, + parseDecorators, + parseFunction, + parseFunctionParams, +} from "./statement"; +import { + canInsertSemicolon, + eatContextual, + expect, + hasPrecedingLineBreak, + isContextual, + unexpected, +} from "./util"; + +export class StopState { + + constructor(stop) { + this.stop = stop; + } +} + +// ### Expression parsing + +// These nest, from the most general expression type at the top to +// 'atomic', nondivisible expression types at the bottom. Most of +// the functions will simply let the function (s) below them parse, +// and, *if* the syntactic construct they handle is present, wrap +// the AST node that the inner parser gave them in another node. 
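
For orientation only (an editorial note, not part of this diff): the parser modules added here sit behind the sucrase package's small public API, whose documented entry point is a single transform call. A minimal sketch, assuming the package is installed as in this patch:

    // Editorial example, not vendored code; transform names follow sucrase's README.
    const {transform} = require("sucrase");

    const compiled = transform('import a from "./a"; export default a;', {
      transforms: ["imports"],          // rewrite ES module syntax to CommonJS
    }).code;
    console.log(compiled);
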
+export function parseExpression(noIn = false) { + parseMaybeAssign(noIn); + if (match(tt.comma)) { + while (eat(tt.comma)) { + parseMaybeAssign(noIn); + } + } +} + +/** + * noIn is used when parsing a for loop so that we don't interpret a following "in" as the binary + * operatior. + * isWithinParens is used to indicate that we're parsing something that might be a comma expression + * or might be an arrow function or might be a Flow type assertion (which requires explicit parens). + * In these cases, we should allow : and ?: after the initial "left" part. + */ +export function parseMaybeAssign(noIn = false, isWithinParens = false) { + if (isTypeScriptEnabled) { + return tsParseMaybeAssign(noIn, isWithinParens); + } else if (isFlowEnabled) { + return flowParseMaybeAssign(noIn, isWithinParens); + } else { + return baseParseMaybeAssign(noIn, isWithinParens); + } +} + +// Parse an assignment expression. This includes applications of +// operators like `+=`. +// Returns true if the expression was an arrow function. +export function baseParseMaybeAssign(noIn, isWithinParens) { + if (match(tt._yield)) { + parseYield(); + return false; + } + + if (match(tt.parenL) || match(tt.name) || match(tt._yield)) { + state.potentialArrowAt = state.start; + } + + const wasArrow = parseMaybeConditional(noIn); + if (isWithinParens) { + parseParenItem(); + } + if (state.type & TokenType.IS_ASSIGN) { + next(); + parseMaybeAssign(noIn); + return false; + } + return wasArrow; +} + +// Parse a ternary conditional (`?:`) operator. +// Returns true if the expression was an arrow function. +function parseMaybeConditional(noIn) { + const wasArrow = parseExprOps(noIn); + if (wasArrow) { + return true; + } + parseConditional(noIn); + return false; +} + +function parseConditional(noIn) { + if (isTypeScriptEnabled || isFlowEnabled) { + typedParseConditional(noIn); + } else { + baseParseConditional(noIn); + } +} + +export function baseParseConditional(noIn) { + if (eat(tt.question)) { + parseMaybeAssign(); + expect(tt.colon); + parseMaybeAssign(noIn); + } +} + +// Start the precedence parser. +// Returns true if this was an arrow function +function parseExprOps(noIn) { + const wasArrow = parseMaybeUnary(); + if (wasArrow) { + return true; + } + parseExprOp(-1, noIn); + return false; +} + +// Parse binary operators with the operator precedence parsing +// algorithm. `left` is the left-hand side of the operator. +// `minPrec` provides context that allows the function to stop and +// defer further parser to one of its callers when it encounters an +// operator that has a lower precedence than the set it is parsing. +function parseExprOp(minPrec, noIn) { + if ( + isTypeScriptEnabled && + (tt._in & TokenType.PRECEDENCE_MASK) > minPrec && + !hasPrecedingLineBreak() && + eatContextual(ContextualKeyword._as) + ) { + state.tokens[state.tokens.length - 1].type = tt._as; + const oldIsType = pushTypeContext(1); + tsParseType(); + popTypeContext(oldIsType); + parseExprOp(minPrec, noIn); + return; + } + + const prec = state.type & TokenType.PRECEDENCE_MASK; + if (prec > 0 && (!noIn || !match(tt._in))) { + if (prec > minPrec) { + const op = state.type; + next(); + + parseMaybeUnary(); + parseExprOp(op & TokenType.IS_RIGHT_ASSOCIATIVE ? prec - 1 : prec, noIn); + parseExprOp(minPrec, noIn); + } + } +} + +// Parse unary operators, both prefix and postfix. +// Returns true if this was an arrow function. 
+export function parseMaybeUnary() { + if (isTypeScriptEnabled && !isJSXEnabled && eat(tt.lessThan)) { + tsParseTypeAssertion(); + return false; + } + + if (state.type & TokenType.IS_PREFIX) { + next(); + parseMaybeUnary(); + return false; + } + + const wasArrow = parseExprSubscripts(); + if (wasArrow) { + return true; + } + while (state.type & TokenType.IS_POSTFIX && !canInsertSemicolon()) { + // The tokenizer calls everything a preincrement, so make it a postincrement when + // we see it in that context. + if (state.type === tt.preIncDec) { + state.type = tt.postIncDec; + } + next(); + } + return false; +} + +// Parse call, dot, and `[]`-subscript expressions. +// Returns true if this was an arrow function. +export function parseExprSubscripts() { + const startPos = state.start; + const wasArrow = parseExprAtom(); + if (wasArrow) { + return true; + } + parseSubscripts(startPos); + return false; +} + +function parseSubscripts(startPos, noCalls = false) { + if (isFlowEnabled) { + flowParseSubscripts(startPos, noCalls); + } else { + baseParseSubscripts(startPos, noCalls); + } +} + +export function baseParseSubscripts(startPos, noCalls = false) { + const stopState = new StopState(false); + do { + parseSubscript(startPos, noCalls, stopState); + } while (!stopState.stop && !state.error); +} + +function parseSubscript(startPos, noCalls, stopState) { + if (isTypeScriptEnabled) { + tsParseSubscript(startPos, noCalls, stopState); + } else if (isFlowEnabled) { + flowParseSubscript(startPos, noCalls, stopState); + } else { + baseParseSubscript(startPos, noCalls, stopState); + } +} + +/** Set 'state.stop = true' to indicate that we should stop parsing subscripts. */ +export function baseParseSubscript(startPos, noCalls, stopState) { + if (!noCalls && eat(tt.doubleColon)) { + parseNoCallExpr(); + stopState.stop = true; + parseSubscripts(startPos, noCalls); + } else if (match(tt.questionDot)) { + if (noCalls && lookaheadType() === tt.parenL) { + stopState.stop = true; + return; + } + next(); + + if (eat(tt.bracketL)) { + parseExpression(); + expect(tt.bracketR); + } else if (eat(tt.parenL)) { + parseCallExpressionArguments(); + } else { + parseIdentifier(); + } + } else if (eat(tt.dot)) { + parseMaybePrivateName(); + } else if (eat(tt.bracketL)) { + parseExpression(); + expect(tt.bracketR); + } else if (!noCalls && match(tt.parenL)) { + if (atPossibleAsync()) { + // We see "async", but it's possible it's a usage of the name "async". Parse as if it's a + // function call, and if we see an arrow later, backtrack and re-parse as a parameter list. + const snapshot = state.snapshot(); + const startTokenIndex = state.tokens.length; + next(); + + const callContextId = getNextContextId(); + + state.tokens[state.tokens.length - 1].contextId = callContextId; + parseCallExpressionArguments(); + state.tokens[state.tokens.length - 1].contextId = callContextId; + + if (shouldParseAsyncArrow()) { + // We hit an arrow, so backtrack and start again parsing function parameters. + state.restoreFromSnapshot(snapshot); + stopState.stop = true; + state.scopeDepth++; + + parseFunctionParams(); + parseAsyncArrowFromCallExpression(startPos, startTokenIndex); + } + } else { + next(); + const callContextId = getNextContextId(); + state.tokens[state.tokens.length - 1].contextId = callContextId; + parseCallExpressionArguments(); + state.tokens[state.tokens.length - 1].contextId = callContextId; + } + } else if (match(tt.backQuote)) { + // Tagged template expression. 
+ parseTemplate(); + } else { + stopState.stop = true; + } +} + +export function atPossibleAsync() { + // This was made less strict than the original version to avoid passing around nodes, but it + // should be safe to have rare false positives here. + return ( + state.tokens[state.tokens.length - 1].contextualKeyword === ContextualKeyword._async && + !canInsertSemicolon() + ); +} + +export function parseCallExpressionArguments() { + let first = true; + while (!eat(tt.parenR) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + if (eat(tt.parenR)) { + break; + } + } + + parseExprListItem(false); + } +} + +function shouldParseAsyncArrow() { + return match(tt.colon) || match(tt.arrow); +} + +function parseAsyncArrowFromCallExpression(functionStart, startTokenIndex) { + if (isTypeScriptEnabled) { + tsStartParseAsyncArrowFromCallExpression(); + } else if (isFlowEnabled) { + flowStartParseAsyncArrowFromCallExpression(); + } + expect(tt.arrow); + parseArrowExpression(startTokenIndex); +} + +// Parse a no-call expression (like argument of `new` or `::` operators). + +function parseNoCallExpr() { + const startPos = state.start; + parseExprAtom(); + parseSubscripts(startPos, true); +} + +// Parse an atomic expression — either a single token that is an +// expression, an expression started by a keyword like `function` or +// `new`, or an expression wrapped in punctuation like `()`, `[]`, +// or `{}`. +// Returns true if the parsed expression was an arrow function. +export function parseExprAtom() { + if (match(tt.jsxText)) { + parseLiteral(); + return false; + } else if (match(tt.lessThan) && isJSXEnabled) { + state.type = tt.jsxTagStart; + jsxParseElement(); + next(); + return false; + } + + const canBeArrow = state.potentialArrowAt === state.start; + switch (state.type) { + case tt.slash: + case tt.assign: + retokenizeSlashAsRegex(); + // Fall through. 
+ + case tt._super: + case tt._this: + case tt.regexp: + case tt.num: + case tt.bigint: + case tt.string: + case tt._null: + case tt._true: + case tt._false: + next(); + return false; + + case tt._import: + if (lookaheadType() === tt.dot) { + parseImportMetaProperty(); + return false; + } + next(); + return false; + + case tt.name: { + const startTokenIndex = state.tokens.length; + const functionStart = state.start; + const contextualKeyword = state.contextualKeyword; + parseIdentifier(); + if (contextualKeyword === ContextualKeyword._await) { + parseAwait(); + return false; + } else if ( + contextualKeyword === ContextualKeyword._async && + match(tt._function) && + !canInsertSemicolon() + ) { + next(); + parseFunction(functionStart, false); + return false; + } else if ( + canBeArrow && + !canInsertSemicolon() && + contextualKeyword === ContextualKeyword._async && + match(tt.name) + ) { + state.scopeDepth++; + parseBindingIdentifier(false); + expect(tt.arrow); + // let foo = async bar => {}; + parseArrowExpression(startTokenIndex); + return true; + } + + if (canBeArrow && !canInsertSemicolon() && match(tt.arrow)) { + state.scopeDepth++; + markPriorBindingIdentifier(false); + expect(tt.arrow); + parseArrowExpression(startTokenIndex); + return true; + } + + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.Access; + return false; + } + + case tt._do: { + next(); + parseBlock(false); + return false; + } + + case tt.parenL: { + const wasArrow = parseParenAndDistinguishExpression(canBeArrow); + return wasArrow; + } + + case tt.bracketL: + next(); + parseExprList(tt.bracketR, true); + return false; + + case tt.braceL: + parseObj(false, false); + return false; + + case tt._function: + parseFunctionExpression(); + return false; + + case tt.at: + parseDecorators(); + // Fall through. + + case tt._class: + parseClass(false); + return false; + + case tt._new: + parseNew(); + return false; + + case tt.backQuote: + parseTemplate(); + return false; + + case tt.doubleColon: { + next(); + parseNoCallExpr(); + return false; + } + + case tt.hash: { + // Smart pipeline topic reference. + next(); + return false; + } + + default: + unexpected(); + return false; + } +} + +function parseMaybePrivateName() { + eat(tt.hash); + parseIdentifier(); +} + +function parseFunctionExpression() { + const functionStart = state.start; + parseIdentifier(); + if (eat(tt.dot)) { + // function.sent + parseMetaProperty(); + } + parseFunction(functionStart, false); +} + +function parseMetaProperty() { + parseIdentifier(); +} + +function parseImportMetaProperty() { + parseIdentifier(); + expect(tt.dot); + // import.meta + parseMetaProperty(); +} + +export function parseLiteral() { + next(); +} + +export function parseParenExpression() { + expect(tt.parenL); + parseExpression(); + expect(tt.parenR); +} + +// Returns true if this was an arrow expression. +function parseParenAndDistinguishExpression(canBeArrow) { + // Assume this is a normal parenthesized expression, but if we see an arrow, we'll bail and + // start over as a parameter list. 
+ const snapshot = state.snapshot(); + + const startTokenIndex = state.tokens.length; + expect(tt.parenL); + + let first = true; + + while (!match(tt.parenR) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + if (match(tt.parenR)) { + break; + } + } + + if (match(tt.ellipsis)) { + parseRest(false /* isBlockScope */); + parseParenItem(); + break; + } else { + parseMaybeAssign(false, true); + } + } + + expect(tt.parenR); + + if (canBeArrow && shouldParseArrow()) { + const wasArrow = parseArrow(); + if (wasArrow) { + // It was an arrow function this whole time, so start over and parse it as params so that we + // get proper token annotations. + state.restoreFromSnapshot(snapshot); + state.scopeDepth++; + // Don't specify a context ID because arrow functions don't need a context ID. + parseFunctionParams(); + parseArrow(); + parseArrowExpression(startTokenIndex); + return true; + } + } + + return false; +} + +function shouldParseArrow() { + return match(tt.colon) || !canInsertSemicolon(); +} + +// Returns whether there was an arrow token. +export function parseArrow() { + if (isTypeScriptEnabled) { + return tsParseArrow(); + } else if (isFlowEnabled) { + return flowParseArrow(); + } else { + return eat(tt.arrow); + } +} + +function parseParenItem() { + if (isTypeScriptEnabled || isFlowEnabled) { + typedParseParenItem(); + } +} + +// New's precedence is slightly tricky. It must allow its argument to +// be a `[]` or dot subscript expression, but not a call — at least, +// not without wrapping it in parentheses. Thus, it uses the noCalls +// argument to parseSubscripts to prevent it from consuming the +// argument list. +function parseNew() { + expect(tt._new); + if (eat(tt.dot)) { + // new.target + parseMetaProperty(); + return; + } + parseNoCallExpr(); + eat(tt.questionDot); + parseNewArguments(); +} + +function parseNewArguments() { + if (isTypeScriptEnabled) { + tsStartParseNewArguments(); + } else if (isFlowEnabled) { + flowStartParseNewArguments(); + } + if (eat(tt.parenL)) { + parseExprList(tt.parenR); + } +} + +export function parseTemplate() { + // Finish `, read quasi + nextTemplateToken(); + // Finish quasi, read ${ + nextTemplateToken(); + while (!match(tt.backQuote) && !state.error) { + expect(tt.dollarBraceL); + parseExpression(); + // Finish }, read quasi + nextTemplateToken(); + // Finish quasi, read either ${ or ` + nextTemplateToken(); + } + next(); +} + +// Parse an object literal or binding pattern. +export function parseObj(isPattern, isBlockScope) { + // Attach a context ID to the object open and close brace and each object key. + const contextId = getNextContextId(); + let first = true; + + next(); + state.tokens[state.tokens.length - 1].contextId = contextId; + + while (!eat(tt.braceR) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + if (eat(tt.braceR)) { + break; + } + } + + let isGenerator = false; + if (match(tt.ellipsis)) { + const previousIndex = state.tokens.length; + parseSpread(); + if (isPattern) { + // Mark role when the only thing being spread over is an identifier. 
+ if (state.tokens.length === previousIndex + 2) { + markPriorBindingIdentifier(isBlockScope); + } + if (eat(tt.braceR)) { + break; + } + } + continue; + } + + if (!isPattern) { + isGenerator = eat(tt.star); + } + + if (!isPattern && isContextual(ContextualKeyword._async)) { + if (isGenerator) unexpected(); + + parseIdentifier(); + if ( + match(tt.colon) || + match(tt.parenL) || + match(tt.braceR) || + match(tt.eq) || + match(tt.comma) + ) { + // This is a key called "async" rather than an async function. + } else { + if (match(tt.star)) { + next(); + isGenerator = true; + } + parsePropertyName(contextId); + } + } else { + parsePropertyName(contextId); + } + + parseObjPropValue(isPattern, isBlockScope, contextId); + } + + state.tokens[state.tokens.length - 1].contextId = contextId; +} + +function isGetterOrSetterMethod(isPattern) { + // We go off of the next and don't bother checking if the node key is actually "get" or "set". + // This lets us avoid generating a node, and should only make the validation worse. + return ( + !isPattern && + (match(tt.string) || // get "string"() {} + match(tt.num) || // get 1() {} + match(tt.bracketL) || // get ["string"]() {} + match(tt.name) || // get foo() {} + !!(state.type & TokenType.IS_KEYWORD)) // get debugger() {} + ); +} + +// Returns true if this was a method. +function parseObjectMethod(isPattern, objectContextId) { + // We don't need to worry about modifiers because object methods can't have optional bodies, so + // the start will never be used. + const functionStart = state.start; + if (match(tt.parenL)) { + if (isPattern) unexpected(); + parseMethod(functionStart, /* isConstructor */ false); + return true; + } + + if (isGetterOrSetterMethod(isPattern)) { + parsePropertyName(objectContextId); + parseMethod(functionStart, /* isConstructor */ false); + return true; + } + return false; +} + +function parseObjectProperty(isPattern, isBlockScope) { + if (eat(tt.colon)) { + if (isPattern) { + parseMaybeDefault(isBlockScope); + } else { + parseMaybeAssign(false); + } + return; + } + + // Since there's no colon, we assume this is an object shorthand. + + // If we're in a destructuring, we've now discovered that the key was actually an assignee, so + // we need to tag it as a declaration with the appropriate scope. Otherwise, we might need to + // transform it on access, so mark it as a normal object shorthand. + if (isPattern) { + state.tokens[state.tokens.length - 1].identifierRole = isBlockScope + ? IdentifierRole.ObjectShorthandBlockScopedDeclaration + : IdentifierRole.ObjectShorthandFunctionScopedDeclaration; + } else { + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ObjectShorthand; + } + + // Regardless of whether we know this to be a pattern or if we're in an ambiguous context, allow + // parsing as if there's a default value. 
+ parseMaybeDefault(isBlockScope, true); +} + +function parseObjPropValue( + isPattern, + isBlockScope, + objectContextId, +) { + if (isTypeScriptEnabled) { + tsStartParseObjPropValue(); + } else if (isFlowEnabled) { + flowStartParseObjPropValue(); + } + const wasMethod = parseObjectMethod(isPattern, objectContextId); + if (!wasMethod) { + parseObjectProperty(isPattern, isBlockScope); + } +} + +export function parsePropertyName(objectContextId) { + if (isFlowEnabled) { + flowParseVariance(); + } + if (eat(tt.bracketL)) { + state.tokens[state.tokens.length - 1].contextId = objectContextId; + parseMaybeAssign(); + expect(tt.bracketR); + state.tokens[state.tokens.length - 1].contextId = objectContextId; + } else { + if (match(tt.num) || match(tt.string)) { + parseExprAtom(); + } else { + parseMaybePrivateName(); + } + + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ObjectKey; + state.tokens[state.tokens.length - 1].contextId = objectContextId; + } +} + +// Parse object or class method. +export function parseMethod(functionStart, isConstructor) { + const funcContextId = getNextContextId(); + + state.scopeDepth++; + const startTokenIndex = state.tokens.length; + const allowModifiers = isConstructor; // For TypeScript parameter properties + parseFunctionParams(allowModifiers, funcContextId); + parseFunctionBodyAndFinish(functionStart, funcContextId); + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, true)); + state.scopeDepth--; +} + +// Parse arrow function expression. +// If the parameters are provided, they will be converted to an +// assignable list. +export function parseArrowExpression(startTokenIndex) { + parseFunctionBody(true); + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, true)); + state.scopeDepth--; +} + +export function parseFunctionBodyAndFinish(functionStart, funcContextId = 0) { + if (isTypeScriptEnabled) { + tsParseFunctionBodyAndFinish(functionStart, funcContextId); + } else if (isFlowEnabled) { + flowParseFunctionBodyAndFinish(funcContextId); + } else { + parseFunctionBody(false, funcContextId); + } +} + +export function parseFunctionBody(allowExpression, funcContextId = 0) { + const isExpression = allowExpression && !match(tt.braceL); + + if (isExpression) { + parseMaybeAssign(); + } else { + parseBlock(true /* allowDirectives */, true /* isFunctionScope */, funcContextId); + } +} + +// Parses a comma-separated list of expressions, and returns them as +// an array. `close` is the token type that ends the list, and +// `allowEmpty` can be turned on to allow subsequent commas with +// nothing in between them to be parsed as `null` (which is needed +// for array literals). + +function parseExprList(close, allowEmpty = false) { + let first = true; + while (!eat(close) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + if (eat(close)) break; + } + parseExprListItem(allowEmpty); + } +} + +function parseExprListItem(allowEmpty) { + if (allowEmpty && match(tt.comma)) { + // Empty item; nothing more to parse for this item. + } else if (match(tt.ellipsis)) { + parseSpread(); + parseParenItem(); + } else if (match(tt.question)) { + // Partial function application proposal. + next(); + } else { + parseMaybeAssign(false, true); + } +} + +// Parse the next token as an identifier. 
+export function parseIdentifier() { + next(); + state.tokens[state.tokens.length - 1].type = tt.name; +} + +// Parses await expression inside async function. +function parseAwait() { + parseMaybeUnary(); +} + +// Parses yield expression inside generator. +function parseYield() { + next(); + if (!match(tt.semi) && !canInsertSemicolon()) { + eat(tt.star); + parseMaybeAssign(); + } +} diff --git a/node_modules/sucrase/dist/parser/traverser/index.d.ts b/node_modules/sucrase/dist/parser/traverser/index.d.ts new file mode 100644 index 00000000..e1fd455a --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/index.d.ts @@ -0,0 +1,2 @@ +import { File } from "../index"; +export declare function parseFile(): File; diff --git a/node_modules/sucrase/dist/parser/traverser/index.js b/node_modules/sucrase/dist/parser/traverser/index.js new file mode 100644 index 00000000..fe895dfd --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/index.js @@ -0,0 +1,18 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); +var _index3 = require('../tokenizer/index'); +var _charcodes = require('../util/charcodes'); +var _base = require('./base'); +var _statement = require('./statement'); + + function parseFile() { + // If enabled, skip leading hashbang line. + if ( + _base.state.pos === 0 && + _base.input.charCodeAt(0) === _charcodes.charCodes.numberSign && + _base.input.charCodeAt(1) === _charcodes.charCodes.exclamationMark + ) { + _index3.skipLineComment.call(void 0, 2); + } + _index3.nextToken.call(void 0, ); + return _statement.parseTopLevel.call(void 0, ); +} exports.parseFile = parseFile; diff --git a/node_modules/sucrase/dist/parser/traverser/index.mjs b/node_modules/sucrase/dist/parser/traverser/index.mjs new file mode 100644 index 00000000..eb8c990a --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/index.mjs @@ -0,0 +1,18 @@ + +import {nextToken, skipLineComment} from "../tokenizer/index"; +import {charCodes} from "../util/charcodes"; +import {input, state} from "./base"; +import {parseTopLevel} from "./statement"; + +export function parseFile() { + // If enabled, skip leading hashbang line. 
+ if ( + state.pos === 0 && + input.charCodeAt(0) === charCodes.numberSign && + input.charCodeAt(1) === charCodes.exclamationMark + ) { + skipLineComment(2); + } + nextToken(); + return parseTopLevel(); +} diff --git a/node_modules/sucrase/dist/parser/traverser/lval.d.ts b/node_modules/sucrase/dist/parser/traverser/lval.d.ts new file mode 100644 index 00000000..3c687cea --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/lval.d.ts @@ -0,0 +1,9 @@ +import { TokenType } from "../tokenizer/types"; +export declare function parseSpread(): void; +export declare function parseRest(isBlockScope: boolean): void; +export declare function parseBindingIdentifier(isBlockScope: boolean): void; +export declare function parseImportedIdentifier(): void; +export declare function markPriorBindingIdentifier(isBlockScope: boolean): void; +export declare function parseBindingAtom(isBlockScope: boolean): void; +export declare function parseBindingList(close: TokenType, isBlockScope: boolean, allowEmpty?: boolean, allowModifiers?: boolean): void; +export declare function parseMaybeDefault(isBlockScope: boolean, leftAlreadyParsed?: boolean): void; diff --git a/node_modules/sucrase/dist/parser/traverser/lval.js b/node_modules/sucrase/dist/parser/traverser/lval.js new file mode 100644 index 00000000..03ff3d81 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/lval.js @@ -0,0 +1,154 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _flow = require('../plugins/flow'); + + + + +var _typescript = require('../plugins/typescript'); + + + + + + + +var _index = require('../tokenizer/index'); +var _keywords = require('../tokenizer/keywords'); +var _types = require('../tokenizer/types'); +var _base = require('./base'); +var _expression = require('./expression'); +var _util = require('./util'); + + function parseSpread() { + _index.next.call(void 0, ); + _expression.parseMaybeAssign.call(void 0, false); +} exports.parseSpread = parseSpread; + + function parseRest(isBlockScope) { + _index.next.call(void 0, ); + parseBindingAtom(isBlockScope); +} exports.parseRest = parseRest; + + function parseBindingIdentifier(isBlockScope) { + _expression.parseIdentifier.call(void 0, ); + markPriorBindingIdentifier(isBlockScope); +} exports.parseBindingIdentifier = parseBindingIdentifier; + + function parseImportedIdentifier() { + _expression.parseIdentifier.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index.IdentifierRole.ImportDeclaration; +} exports.parseImportedIdentifier = parseImportedIdentifier; + + function markPriorBindingIdentifier(isBlockScope) { + if (_base.state.scopeDepth === 0) { + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index.IdentifierRole.TopLevelDeclaration; + } else { + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = isBlockScope + ? _index.IdentifierRole.BlockScopedDeclaration + : _index.IdentifierRole.FunctionScopedDeclaration; + } +} exports.markPriorBindingIdentifier = markPriorBindingIdentifier; + +// Parses lvalue (assignable) atom. + function parseBindingAtom(isBlockScope) { + switch (_base.state.type) { + case _types.TokenType._this: { + // In TypeScript, "this" may be the name of a parameter, so allow it. 
+ const oldIsType = _index.pushTypeContext.call(void 0, 0); + _index.next.call(void 0, ); + _index.popTypeContext.call(void 0, oldIsType); + return; + } + + case _types.TokenType._yield: + case _types.TokenType.name: { + _base.state.type = _types.TokenType.name; + parseBindingIdentifier(isBlockScope); + return; + } + + case _types.TokenType.bracketL: { + _index.next.call(void 0, ); + parseBindingList(_types.TokenType.bracketR, isBlockScope, true /* allowEmpty */); + return; + } + + case _types.TokenType.braceL: + _expression.parseObj.call(void 0, true, isBlockScope); + return; + + default: + _util.unexpected.call(void 0, ); + } +} exports.parseBindingAtom = parseBindingAtom; + + function parseBindingList( + close, + isBlockScope, + allowEmpty = false, + allowModifiers = false, +) { + let first = true; + + let hasRemovedComma = false; + const firstItemTokenIndex = _base.state.tokens.length; + + while (!_index.eat.call(void 0, close) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types.TokenType.comma); + // After a "this" type in TypeScript, we need to set the following comma (if any) to also be + // a type token so that it will be removed. + if (!hasRemovedComma && _base.state.tokens[firstItemTokenIndex].isType) { + _base.state.tokens[_base.state.tokens.length - 1].isType = true; + hasRemovedComma = true; + } + } + if (allowEmpty && _index.match.call(void 0, _types.TokenType.comma)) { + // Empty item; nothing further to parse for this item. + } else if (_index.eat.call(void 0, close)) { + break; + } else if (_index.match.call(void 0, _types.TokenType.ellipsis)) { + parseRest(isBlockScope); + parseAssignableListItemTypes(); + // Support rest element trailing commas allowed by TypeScript <2.9. + _index.eat.call(void 0, _types.TokenType.comma); + _util.expect.call(void 0, close); + break; + } else { + parseAssignableListItem(allowModifiers, isBlockScope); + } + } +} exports.parseBindingList = parseBindingList; + +function parseAssignableListItem(allowModifiers, isBlockScope) { + if (allowModifiers) { + _typescript.tsParseAccessModifier.call(void 0, ); + _typescript.tsParseModifier.call(void 0, [_keywords.ContextualKeyword._readonly]); + } + + parseMaybeDefault(isBlockScope); + parseAssignableListItemTypes(); + parseMaybeDefault(isBlockScope, true /* leftAlreadyParsed */); +} + +function parseAssignableListItemTypes() { + if (_base.isFlowEnabled) { + _flow.flowParseAssignableListItemTypes.call(void 0, ); + } else if (_base.isTypeScriptEnabled) { + _typescript.tsParseAssignableListItemTypes.call(void 0, ); + } +} + +// Parses assignment pattern around given atom if possible. 
+ function parseMaybeDefault(isBlockScope, leftAlreadyParsed = false) { + if (!leftAlreadyParsed) { + parseBindingAtom(isBlockScope); + } + if (!_index.eat.call(void 0, _types.TokenType.eq)) { + return; + } + const eqIndex = _base.state.tokens.length - 1; + _expression.parseMaybeAssign.call(void 0, ); + _base.state.tokens[eqIndex].rhsEndIndex = _base.state.tokens.length; +} exports.parseMaybeDefault = parseMaybeDefault; diff --git a/node_modules/sucrase/dist/parser/traverser/lval.mjs b/node_modules/sucrase/dist/parser/traverser/lval.mjs new file mode 100644 index 00000000..330c87c9 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/lval.mjs @@ -0,0 +1,154 @@ +import {flowParseAssignableListItemTypes} from "../plugins/flow"; +import { + tsParseAccessModifier, + tsParseAssignableListItemTypes, + tsParseModifier, +} from "../plugins/typescript"; +import { + eat, + IdentifierRole, + match, + next, + popTypeContext, + pushTypeContext, +} from "../tokenizer/index"; +import {ContextualKeyword} from "../tokenizer/keywords"; +import {TokenType, TokenType as tt} from "../tokenizer/types"; +import {isFlowEnabled, isTypeScriptEnabled, state} from "./base"; +import {parseIdentifier, parseMaybeAssign, parseObj} from "./expression"; +import {expect, unexpected} from "./util"; + +export function parseSpread() { + next(); + parseMaybeAssign(false); +} + +export function parseRest(isBlockScope) { + next(); + parseBindingAtom(isBlockScope); +} + +export function parseBindingIdentifier(isBlockScope) { + parseIdentifier(); + markPriorBindingIdentifier(isBlockScope); +} + +export function parseImportedIdentifier() { + parseIdentifier(); + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ImportDeclaration; +} + +export function markPriorBindingIdentifier(isBlockScope) { + if (state.scopeDepth === 0) { + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.TopLevelDeclaration; + } else { + state.tokens[state.tokens.length - 1].identifierRole = isBlockScope + ? IdentifierRole.BlockScopedDeclaration + : IdentifierRole.FunctionScopedDeclaration; + } +} + +// Parses lvalue (assignable) atom. +export function parseBindingAtom(isBlockScope) { + switch (state.type) { + case tt._this: { + // In TypeScript, "this" may be the name of a parameter, so allow it. + const oldIsType = pushTypeContext(0); + next(); + popTypeContext(oldIsType); + return; + } + + case tt._yield: + case tt.name: { + state.type = tt.name; + parseBindingIdentifier(isBlockScope); + return; + } + + case tt.bracketL: { + next(); + parseBindingList(tt.bracketR, isBlockScope, true /* allowEmpty */); + return; + } + + case tt.braceL: + parseObj(true, isBlockScope); + return; + + default: + unexpected(); + } +} + +export function parseBindingList( + close, + isBlockScope, + allowEmpty = false, + allowModifiers = false, +) { + let first = true; + + let hasRemovedComma = false; + const firstItemTokenIndex = state.tokens.length; + + while (!eat(close) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + // After a "this" type in TypeScript, we need to set the following comma (if any) to also be + // a type token so that it will be removed. + if (!hasRemovedComma && state.tokens[firstItemTokenIndex].isType) { + state.tokens[state.tokens.length - 1].isType = true; + hasRemovedComma = true; + } + } + if (allowEmpty && match(tt.comma)) { + // Empty item; nothing further to parse for this item. 
+ } else if (eat(close)) { + break; + } else if (match(tt.ellipsis)) { + parseRest(isBlockScope); + parseAssignableListItemTypes(); + // Support rest element trailing commas allowed by TypeScript <2.9. + eat(TokenType.comma); + expect(close); + break; + } else { + parseAssignableListItem(allowModifiers, isBlockScope); + } + } +} + +function parseAssignableListItem(allowModifiers, isBlockScope) { + if (allowModifiers) { + tsParseAccessModifier(); + tsParseModifier([ContextualKeyword._readonly]); + } + + parseMaybeDefault(isBlockScope); + parseAssignableListItemTypes(); + parseMaybeDefault(isBlockScope, true /* leftAlreadyParsed */); +} + +function parseAssignableListItemTypes() { + if (isFlowEnabled) { + flowParseAssignableListItemTypes(); + } else if (isTypeScriptEnabled) { + tsParseAssignableListItemTypes(); + } +} + +// Parses assignment pattern around given atom if possible. +export function parseMaybeDefault(isBlockScope, leftAlreadyParsed = false) { + if (!leftAlreadyParsed) { + parseBindingAtom(isBlockScope); + } + if (!eat(tt.eq)) { + return; + } + const eqIndex = state.tokens.length - 1; + parseMaybeAssign(); + state.tokens[eqIndex].rhsEndIndex = state.tokens.length; +} diff --git a/node_modules/sucrase/dist/parser/traverser/statement.d.ts b/node_modules/sucrase/dist/parser/traverser/statement.d.ts new file mode 100644 index 00000000..525fa5d7 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/statement.d.ts @@ -0,0 +1,20 @@ +import { File } from "../index"; +import { TokenType } from "../tokenizer/types"; +export declare function parseTopLevel(): File; +export declare function parseStatement(declaration: boolean): void; +export declare function parseDecorators(): void; +export declare function baseParseMaybeDecoratorArguments(): void; +export declare function parseVarStatement(kind: TokenType): void; +export declare function parseBlock(allowDirectives?: boolean, isFunctionScope?: boolean, contextId?: number): void; +export declare function parseBlockBody(end: TokenType): void; +export declare function parseFunction(functionStart: number, isStatement: boolean, optionalId?: boolean): void; +export declare function parseFunctionParams(allowModifiers?: boolean, funcContextId?: number): void; +export declare function parseClass(isStatement: boolean, optionalId?: boolean): void; +export declare function parseClassPropertyName(classContextId: number): void; +export declare function parsePostMemberNameModifiers(): void; +export declare function parseClassProperty(): void; +export declare function parseExport(): void; +export declare function parseExportFrom(): void; +export declare function baseParseExportStar(): void; +export declare function parseExportSpecifiers(): void; +export declare function parseImport(): void; diff --git a/node_modules/sucrase/dist/parser/traverser/statement.js b/node_modules/sucrase/dist/parser/traverser/statement.js new file mode 100644 index 00000000..9384e618 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/statement.js @@ -0,0 +1,1115 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});/* eslint max-len: 0 */ + +var _index = require('../index'); + + + + + + + + + + + + + + + +var _flow = require('../plugins/flow'); + + + + + + + + + + + + + + + + +var _typescript = require('../plugins/typescript'); + + + + + + + + + +var _tokenizer = require('../tokenizer'); +var _keywords = require('../tokenizer/keywords'); +var _state = require('../tokenizer/state'); +var _types = require('../tokenizer/types'); +var _base 
= require('./base'); + + + + + + + + + + + +var _expression = require('./expression'); + + + + + +var _lval = require('./lval'); + + + + + + + + + +var _util = require('./util'); + + function parseTopLevel() { + parseBlockBody(_types.TokenType.eof); + _base.state.scopes.push(new (0, _state.Scope)(0, _base.state.tokens.length, true)); + if (_base.state.scopeDepth !== 0) { + throw new Error(`Invalid scope depth at end of file: ${_base.state.scopeDepth}`); + } + return new (0, _index.File)(_base.state.tokens, _base.state.scopes); +} exports.parseTopLevel = parseTopLevel; + +// Parse a single statement. +// +// If expecting a statement and finding a slash operator, parse a +// regular expression literal. This is to handle cases like +// `if (foo) /blah/.exec(foo)`, where looking at the previous token +// does not help. + + function parseStatement(declaration) { + if (_base.isFlowEnabled) { + if (_flow.flowTryParseStatement.call(void 0, )) { + return; + } + } + if (_tokenizer.match.call(void 0, _types.TokenType.at)) { + parseDecorators(); + } + parseStatementContent(declaration); +} exports.parseStatement = parseStatement; + +function parseStatementContent(declaration) { + if (_base.isTypeScriptEnabled) { + if (_typescript.tsTryParseStatementContent.call(void 0, )) { + return; + } + } + + const starttype = _base.state.type; + + // Most types of statements are recognized by the keyword they + // start with. Many are trivial to parse, some require a bit of + // complexity. + + switch (starttype) { + case _types.TokenType._break: + case _types.TokenType._continue: + parseBreakContinueStatement(); + return; + case _types.TokenType._debugger: + parseDebuggerStatement(); + return; + case _types.TokenType._do: + parseDoStatement(); + return; + case _types.TokenType._for: + parseForStatement(); + return; + case _types.TokenType._function: + if (_tokenizer.lookaheadType.call(void 0, ) === _types.TokenType.dot) break; + if (!declaration) _util.unexpected.call(void 0, ); + parseFunctionStatement(); + return; + + case _types.TokenType._class: + if (!declaration) _util.unexpected.call(void 0, ); + parseClass(true); + return; + + case _types.TokenType._if: + parseIfStatement(); + return; + case _types.TokenType._return: + parseReturnStatement(); + return; + case _types.TokenType._switch: + parseSwitchStatement(); + return; + case _types.TokenType._throw: + parseThrowStatement(); + return; + case _types.TokenType._try: + parseTryStatement(); + return; + + case _types.TokenType._let: + case _types.TokenType._const: + if (!declaration) _util.unexpected.call(void 0, ); // NOTE: falls through to _var + + case _types.TokenType._var: + parseVarStatement(starttype); + return; + + case _types.TokenType._while: + parseWhileStatement(); + return; + case _types.TokenType.braceL: + parseBlock(); + return; + case _types.TokenType.semi: + parseEmptyStatement(); + return; + case _types.TokenType._export: + case _types.TokenType._import: { + const nextType = _tokenizer.lookaheadType.call(void 0, ); + if (nextType === _types.TokenType.parenL || nextType === _types.TokenType.dot) { + break; + } + _tokenizer.next.call(void 0, ); + if (starttype === _types.TokenType._import) { + parseImport(); + } else { + parseExport(); + } + return; + } + case _types.TokenType.name: + if (_base.state.contextualKeyword === _keywords.ContextualKeyword._async) { + const functionStart = _base.state.start; + // peek ahead and see if next token is a function + const snapshot = _base.state.snapshot(); + _tokenizer.next.call(void 0, ); + if 
(_tokenizer.match.call(void 0, _types.TokenType._function) && !_util.canInsertSemicolon.call(void 0, )) { + _util.expect.call(void 0, _types.TokenType._function); + parseFunction(functionStart, true); + return; + } else { + _base.state.restoreFromSnapshot(snapshot); + } + } + default: + // Do nothing. + break; + } + + // If the statement does not start with a statement keyword or a + // brace, it's an ExpressionStatement or LabeledStatement. We + // simply start parsing an expression, and afterwards, if the + // next token is a colon and the expression was a simple + // Identifier node, we switch to interpreting it as a label. + const initialTokensLength = _base.state.tokens.length; + _expression.parseExpression.call(void 0, ); + let simpleName = null; + if (_base.state.tokens.length === initialTokensLength + 1) { + const token = _base.state.tokens[_base.state.tokens.length - 1]; + if (token.type === _types.TokenType.name) { + simpleName = token.contextualKeyword; + } + } + if (simpleName == null) { + _util.semicolon.call(void 0, ); + return; + } + if (_tokenizer.eat.call(void 0, _types.TokenType.colon)) { + parseLabeledStatement(); + } else { + // This was an identifier, so we might want to handle flow/typescript-specific cases. + parseIdentifierStatement(simpleName); + } +} + + function parseDecorators() { + while (_tokenizer.match.call(void 0, _types.TokenType.at)) { + parseDecorator(); + } +} exports.parseDecorators = parseDecorators; + +function parseDecorator() { + _tokenizer.next.call(void 0, ); + if (_tokenizer.eat.call(void 0, _types.TokenType.parenL)) { + _expression.parseExpression.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.parenR); + } else { + _expression.parseIdentifier.call(void 0, ); + while (_tokenizer.eat.call(void 0, _types.TokenType.dot)) { + _expression.parseIdentifier.call(void 0, ); + } + } + parseMaybeDecoratorArguments(); +} + +function parseMaybeDecoratorArguments() { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseMaybeDecoratorArguments.call(void 0, ); + } else { + baseParseMaybeDecoratorArguments(); + } +} + + function baseParseMaybeDecoratorArguments() { + if (_tokenizer.eat.call(void 0, _types.TokenType.parenL)) { + _expression.parseCallExpressionArguments.call(void 0, ); + } +} exports.baseParseMaybeDecoratorArguments = baseParseMaybeDecoratorArguments; + +function parseBreakContinueStatement() { + _tokenizer.next.call(void 0, ); + if (!_util.isLineTerminator.call(void 0, )) { + _expression.parseIdentifier.call(void 0, ); + _util.semicolon.call(void 0, ); + } +} + +function parseDebuggerStatement() { + _tokenizer.next.call(void 0, ); + _util.semicolon.call(void 0, ); +} + +function parseDoStatement() { + _tokenizer.next.call(void 0, ); + parseStatement(false); + _util.expect.call(void 0, _types.TokenType._while); + _expression.parseParenExpression.call(void 0, ); + _tokenizer.eat.call(void 0, _types.TokenType.semi); +} + +function parseForStatement() { + _base.state.scopeDepth++; + const startTokenIndex = _base.state.tokens.length; + parseAmbiguousForStatement(); + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, false)); + _base.state.scopeDepth--; +} + +// Disambiguating between a `for` and a `for`/`in` or `for`/`of` +// loop is non-trivial. Basically, we have to parse the init `var` +// statement or expression, disallowing the `in` operator (see +// the second parameter to `parseExpression`), and then check +// whether the next token is `in` or `of`. 
When there is no init +// part (semicolon immediately after the opening parenthesis), it +// is a regular `for` loop. +function parseAmbiguousForStatement() { + _tokenizer.next.call(void 0, ); + + let forAwait = false; + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._await)) { + forAwait = true; + _tokenizer.next.call(void 0, ); + } + _util.expect.call(void 0, _types.TokenType.parenL); + + if (_tokenizer.match.call(void 0, _types.TokenType.semi)) { + if (forAwait) { + _util.unexpected.call(void 0, ); + } + parseFor(); + return; + } + + if (_tokenizer.match.call(void 0, _types.TokenType._var) || _tokenizer.match.call(void 0, _types.TokenType._let) || _tokenizer.match.call(void 0, _types.TokenType._const)) { + const varKind = _base.state.type; + _tokenizer.next.call(void 0, ); + parseVar(true, varKind); + if (_tokenizer.match.call(void 0, _types.TokenType._in) || _util.isContextual.call(void 0, _keywords.ContextualKeyword._of)) { + parseForIn(forAwait); + return; + } + parseFor(); + return; + } + + _expression.parseExpression.call(void 0, true); + if (_tokenizer.match.call(void 0, _types.TokenType._in) || _util.isContextual.call(void 0, _keywords.ContextualKeyword._of)) { + parseForIn(forAwait); + return; + } + if (forAwait) { + _util.unexpected.call(void 0, ); + } + parseFor(); +} + +function parseFunctionStatement() { + const functionStart = _base.state.start; + _tokenizer.next.call(void 0, ); + parseFunction(functionStart, true); +} + +function parseIfStatement() { + _tokenizer.next.call(void 0, ); + _expression.parseParenExpression.call(void 0, ); + parseStatement(false); + if (_tokenizer.eat.call(void 0, _types.TokenType._else)) { + parseStatement(false); + } +} + +function parseReturnStatement() { + _tokenizer.next.call(void 0, ); + + // In `return` (and `break`/`continue`), the keywords with + // optional arguments, we eagerly look for a semicolon or the + // possibility to insert one. + + if (!_util.isLineTerminator.call(void 0, )) { + _expression.parseExpression.call(void 0, ); + _util.semicolon.call(void 0, ); + } +} + +function parseSwitchStatement() { + _tokenizer.next.call(void 0, ); + _expression.parseParenExpression.call(void 0, ); + _base.state.scopeDepth++; + const startTokenIndex = _base.state.tokens.length; + _util.expect.call(void 0, _types.TokenType.braceL); + + // Don't bother validation; just go through any sequence of cases, defaults, and statements. 
+ while (!_tokenizer.match.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + if (_tokenizer.match.call(void 0, _types.TokenType._case) || _tokenizer.match.call(void 0, _types.TokenType._default)) { + const isCase = _tokenizer.match.call(void 0, _types.TokenType._case); + _tokenizer.next.call(void 0, ); + if (isCase) { + _expression.parseExpression.call(void 0, ); + } + _util.expect.call(void 0, _types.TokenType.colon); + } else { + parseStatement(true); + } + } + _tokenizer.next.call(void 0, ); // Closing brace + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, false)); + _base.state.scopeDepth--; +} + +function parseThrowStatement() { + _tokenizer.next.call(void 0, ); + _expression.parseExpression.call(void 0, ); + _util.semicolon.call(void 0, ); +} + +function parseTryStatement() { + _tokenizer.next.call(void 0, ); + + parseBlock(); + + if (_tokenizer.match.call(void 0, _types.TokenType._catch)) { + _tokenizer.next.call(void 0, ); + let catchBindingStartTokenIndex = null; + if (_tokenizer.match.call(void 0, _types.TokenType.parenL)) { + _base.state.scopeDepth++; + catchBindingStartTokenIndex = _base.state.tokens.length; + _util.expect.call(void 0, _types.TokenType.parenL); + _lval.parseBindingAtom.call(void 0, true /* isBlockScope */); + _util.expect.call(void 0, _types.TokenType.parenR); + } + parseBlock(); + if (catchBindingStartTokenIndex != null) { + // We need a special scope for the catch binding which includes the binding itself and the + // catch block. + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(catchBindingStartTokenIndex, endTokenIndex, false)); + _base.state.scopeDepth--; + } + } + if (_tokenizer.eat.call(void 0, _types.TokenType._finally)) { + parseBlock(); + } +} + + function parseVarStatement(kind) { + _tokenizer.next.call(void 0, ); + parseVar(false, kind); + _util.semicolon.call(void 0, ); +} exports.parseVarStatement = parseVarStatement; + +function parseWhileStatement() { + _tokenizer.next.call(void 0, ); + _expression.parseParenExpression.call(void 0, ); + parseStatement(false); +} + +function parseEmptyStatement() { + _tokenizer.next.call(void 0, ); +} + +function parseLabeledStatement() { + parseStatement(true); +} + +/** + * Parse a statement starting with an identifier of the given name. Subclasses match on the name + * to handle statements like "declare". + */ +function parseIdentifierStatement(contextualKeyword) { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseIdentifierStatement.call(void 0, contextualKeyword); + } else if (_base.isFlowEnabled) { + _flow.flowParseIdentifierStatement.call(void 0, contextualKeyword); + } else { + _util.semicolon.call(void 0, ); + } +} + +// Parse a semicolon-enclosed block of statements, handling `"use +// strict"` declarations when `allowStrict` is true (used for +// function bodies). 
+ + function parseBlock( + allowDirectives = false, + isFunctionScope = false, + contextId = 0, +) { + const startTokenIndex = _base.state.tokens.length; + _base.state.scopeDepth++; + _util.expect.call(void 0, _types.TokenType.braceL); + if (contextId) { + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; + } + parseBlockBody(_types.TokenType.braceR); + if (contextId) { + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; + } + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, isFunctionScope)); + _base.state.scopeDepth--; +} exports.parseBlock = parseBlock; + + function parseBlockBody(end) { + while (!_tokenizer.eat.call(void 0, end) && !_base.state.error) { + parseStatement(true); + } +} exports.parseBlockBody = parseBlockBody; + +// Parse a regular `for` loop. The disambiguation code in +// `parseStatement` will already have parsed the init statement or +// expression. + +function parseFor() { + _util.expect.call(void 0, _types.TokenType.semi); + if (!_tokenizer.match.call(void 0, _types.TokenType.semi)) { + _expression.parseExpression.call(void 0, ); + } + _util.expect.call(void 0, _types.TokenType.semi); + if (!_tokenizer.match.call(void 0, _types.TokenType.parenR)) { + _expression.parseExpression.call(void 0, ); + } + _util.expect.call(void 0, _types.TokenType.parenR); + parseStatement(false); +} + +// Parse a `for`/`in` and `for`/`of` loop, which are almost +// same from parser's perspective. + +function parseForIn(forAwait) { + if (forAwait) { + _util.eatContextual.call(void 0, _keywords.ContextualKeyword._of); + } else { + _tokenizer.next.call(void 0, ); + } + _expression.parseExpression.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.parenR); + parseStatement(false); +} + +// Parse a list of variable declarations. + +function parseVar(isFor, kind) { + while (true) { + const isBlockScope = kind === _types.TokenType._const || kind === _types.TokenType._let; + parseVarHead(isBlockScope); + if (_tokenizer.eat.call(void 0, _types.TokenType.eq)) { + const eqIndex = _base.state.tokens.length - 1; + _expression.parseMaybeAssign.call(void 0, isFor); + _base.state.tokens[eqIndex].rhsEndIndex = _base.state.tokens.length; + } + if (!_tokenizer.eat.call(void 0, _types.TokenType.comma)) { + break; + } + } +} + +function parseVarHead(isBlockScope) { + _lval.parseBindingAtom.call(void 0, isBlockScope); + if (_base.isTypeScriptEnabled) { + _typescript.tsAfterParseVarHead.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowAfterParseVarHead.call(void 0, ); + } +} + +// Parse a function declaration or literal (depending on the +// `isStatement` parameter). + + function parseFunction( + functionStart, + isStatement, + optionalId = false, +) { + if (_tokenizer.match.call(void 0, _types.TokenType.star)) { + _tokenizer.next.call(void 0, ); + } + + if (isStatement && !optionalId && !_tokenizer.match.call(void 0, _types.TokenType.name) && !_tokenizer.match.call(void 0, _types.TokenType._yield)) { + _util.unexpected.call(void 0, ); + } + + let nameScopeStartTokenIndex = null; + + if (_tokenizer.match.call(void 0, _types.TokenType.name)) { + // Expression-style functions should limit their name's scope to the function body, so we make + // a new function scope to enforce that. 
+ if (!isStatement) { + nameScopeStartTokenIndex = _base.state.tokens.length; + _base.state.scopeDepth++; + } + _lval.parseBindingIdentifier.call(void 0, false); + } + + const startTokenIndex = _base.state.tokens.length; + _base.state.scopeDepth++; + parseFunctionParams(); + _expression.parseFunctionBodyAndFinish.call(void 0, functionStart); + const endTokenIndex = _base.state.tokens.length; + // In addition to the block scope of the function body, we need a separate function-style scope + // that includes the params. + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, true)); + _base.state.scopeDepth--; + if (nameScopeStartTokenIndex !== null) { + _base.state.scopes.push(new (0, _state.Scope)(nameScopeStartTokenIndex, endTokenIndex, true)); + _base.state.scopeDepth--; + } +} exports.parseFunction = parseFunction; + + function parseFunctionParams( + allowModifiers = false, + funcContextId = 0, +) { + if (_base.isTypeScriptEnabled) { + _typescript.tsStartParseFunctionParams.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowStartParseFunctionParams.call(void 0, ); + } + + _util.expect.call(void 0, _types.TokenType.parenL); + if (funcContextId) { + _base.state.tokens[_base.state.tokens.length - 1].contextId = funcContextId; + } + _lval.parseBindingList.call(void 0, _types.TokenType.parenR, false /* isBlockScope */, false /* allowEmpty */, allowModifiers); + if (funcContextId) { + _base.state.tokens[_base.state.tokens.length - 1].contextId = funcContextId; + } +} exports.parseFunctionParams = parseFunctionParams; + +// Parse a class declaration or literal (depending on the +// `isStatement` parameter). + + function parseClass(isStatement, optionalId = false) { + // Put a context ID on the class keyword, the open-brace, and the close-brace, so that later + // code can easily navigate to meaningful points on the class. + const contextId = _base.getNextContextId.call(void 0, ); + + _tokenizer.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; + _base.state.tokens[_base.state.tokens.length - 1].isExpression = !isStatement; + // Like with functions, we declare a special "name scope" from the start of the name to the end + // of the class, but only with expression-style classes, to represent the fact that the name is + // available to the body of the class but not an outer declaration. 
+ let nameScopeStartTokenIndex = null; + if (!isStatement) { + nameScopeStartTokenIndex = _base.state.tokens.length; + _base.state.scopeDepth++; + } + parseClassId(isStatement, optionalId); + parseClassSuper(); + const openBraceIndex = _base.state.tokens.length; + parseClassBody(contextId); + if (_base.state.error) { + return; + } + _base.state.tokens[openBraceIndex].contextId = contextId; + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; + if (nameScopeStartTokenIndex !== null) { + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(nameScopeStartTokenIndex, endTokenIndex, false)); + _base.state.scopeDepth--; + } +} exports.parseClass = parseClass; + +function isClassProperty() { + return _tokenizer.match.call(void 0, _types.TokenType.eq) || _tokenizer.match.call(void 0, _types.TokenType.semi) || _tokenizer.match.call(void 0, _types.TokenType.braceR) || _tokenizer.match.call(void 0, _types.TokenType.bang) || _tokenizer.match.call(void 0, _types.TokenType.colon); +} + +function isClassMethod() { + return _tokenizer.match.call(void 0, _types.TokenType.parenL) || _tokenizer.match.call(void 0, _types.TokenType.lessThan); +} + +function parseClassBody(classContextId) { + _util.expect.call(void 0, _types.TokenType.braceL); + + while (!_tokenizer.eat.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + if (_tokenizer.eat.call(void 0, _types.TokenType.semi)) { + continue; + } + + if (_tokenizer.match.call(void 0, _types.TokenType.at)) { + parseDecorator(); + continue; + } + const memberStart = _base.state.start; + parseClassMember(memberStart, classContextId); + } +} + +function parseClassMember(memberStart, classContextId) { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseAccessModifier.call(void 0, ); + } + let isStatic = false; + if (_tokenizer.match.call(void 0, _types.TokenType.name) && _base.state.contextualKeyword === _keywords.ContextualKeyword._static) { + _expression.parseIdentifier.call(void 0, ); // eats 'static' + if (isClassMethod()) { + parseClassMethod(memberStart, /* isConstructor */ false); + return; + } else if (isClassProperty()) { + parseClassProperty(); + return; + } + // otherwise something static + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._static; + isStatic = true; + } + + parseClassMemberWithIsStatic(memberStart, isStatic, classContextId); +} + +function parseClassMemberWithIsStatic( + memberStart, + isStatic, + classContextId, +) { + if (_base.isTypeScriptEnabled) { + if (_typescript.tsTryParseClassMemberWithIsStatic.call(void 0, isStatic, classContextId)) { + return; + } + } + if (_tokenizer.eat.call(void 0, _types.TokenType.star)) { + // a generator + parseClassPropertyName(classContextId); + parseClassMethod(memberStart, /* isConstructor */ false); + return; + } + + // Get the identifier name so we can tell if it's actually a keyword like "async", "get", or + // "set". + parseClassPropertyName(classContextId); + let isConstructor = false; + const token = _base.state.tokens[_base.state.tokens.length - 1]; + // We allow "constructor" as either an identifier or a string. 
+ if (token.contextualKeyword === _keywords.ContextualKeyword._constructor) { + isConstructor = true; + } + parsePostMemberNameModifiers(); + + if (isClassMethod()) { + parseClassMethod(memberStart, isConstructor); + } else if (isClassProperty()) { + parseClassProperty(); + } else if (token.contextualKeyword === _keywords.ContextualKeyword._async && !_util.isLineTerminator.call(void 0, )) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._async; + // an async method + const isGenerator = _tokenizer.match.call(void 0, _types.TokenType.star); + if (isGenerator) { + _tokenizer.next.call(void 0, ); + } + + // The so-called parsed name would have been "async": get the real name. + parseClassPropertyName(classContextId); + parseClassMethod(memberStart, false /* isConstructor */); + } else if ( + (token.contextualKeyword === _keywords.ContextualKeyword._get || + token.contextualKeyword === _keywords.ContextualKeyword._set) && + !(_util.isLineTerminator.call(void 0, ) && _tokenizer.match.call(void 0, _types.TokenType.star)) + ) { + if (token.contextualKeyword === _keywords.ContextualKeyword._get) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._get; + } else { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._set; + } + // `get\n*` is an uninitialized property named 'get' followed by a generator. + // a getter or setter + // The so-called parsed name would have been "get/set": get the real name. + parseClassPropertyName(classContextId); + parseClassMethod(memberStart, /* isConstructor */ false); + } else if (_util.isLineTerminator.call(void 0, )) { + // an uninitialized class property (due to ASI, since we don't otherwise recognize the next token) + parseClassProperty(); + } else { + _util.unexpected.call(void 0, ); + } +} + +function parseClassMethod(functionStart, isConstructor) { + if (_base.isTypeScriptEnabled) { + _typescript.tsTryParseTypeParameters.call(void 0, ); + } else if (_base.isFlowEnabled) { + if (_tokenizer.match.call(void 0, _types.TokenType.lessThan)) { + _flow.flowParseTypeParameterDeclaration.call(void 0, ); + } + } + _expression.parseMethod.call(void 0, functionStart, isConstructor); +} + +// Return the name of the class property, if it is a simple identifier. 
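
The member dispatch in parseClassMemberWithIsStatic above walks through static members, generators, async methods, getters, setters, and plain properties before falling back to an error; a compact example of the member shapes it distinguishes (the class below is invented for illustration and is not part of the vendored file):

    class Example {
      static count = 0;             // static class property
      *items() {}                   // generator method
      async load() {}               // async method
      get value() { return 1; }     // getter
      set value(v) {}               // setter
      "constructor"() {}            // "constructor" as a string key still names the constructor
    }
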
+ function parseClassPropertyName(classContextId) { + _expression.parsePropertyName.call(void 0, classContextId); +} exports.parseClassPropertyName = parseClassPropertyName; + + function parsePostMemberNameModifiers() { + if (_base.isTypeScriptEnabled) { + const oldIsType = _tokenizer.pushTypeContext.call(void 0, 0); + _tokenizer.eat.call(void 0, _types.TokenType.question); + _tokenizer.popTypeContext.call(void 0, oldIsType); + } +} exports.parsePostMemberNameModifiers = parsePostMemberNameModifiers; + + function parseClassProperty() { + if (_base.isTypeScriptEnabled) { + _tokenizer.eat.call(void 0, _types.TokenType.bang); + _typescript.tsTryParseTypeAnnotation.call(void 0, ); + } else if (_base.isFlowEnabled) { + if (_tokenizer.match.call(void 0, _types.TokenType.colon)) { + _flow.flowParseTypeAnnotation.call(void 0, ); + } + } + + if (_tokenizer.match.call(void 0, _types.TokenType.eq)) { + const equalsTokenIndex = _base.state.tokens.length; + _tokenizer.next.call(void 0, ); + _expression.parseMaybeAssign.call(void 0, ); + _base.state.tokens[equalsTokenIndex].rhsEndIndex = _base.state.tokens.length; + } + _util.semicolon.call(void 0, ); +} exports.parseClassProperty = parseClassProperty; + +function parseClassId(isStatement, optionalId = false) { + if ( + _base.isTypeScriptEnabled && + (!isStatement || optionalId) && + _util.isContextual.call(void 0, _keywords.ContextualKeyword._implements) + ) { + return; + } + + if (_tokenizer.match.call(void 0, _types.TokenType.name)) { + _lval.parseBindingIdentifier.call(void 0, true); + } + + if (_base.isTypeScriptEnabled) { + _typescript.tsTryParseTypeParameters.call(void 0, ); + } else if (_base.isFlowEnabled) { + if (_tokenizer.match.call(void 0, _types.TokenType.lessThan)) { + _flow.flowParseTypeParameterDeclaration.call(void 0, ); + } + } +} + +// Returns true if there was a superclass. +function parseClassSuper() { + let hasSuper = false; + if (_tokenizer.eat.call(void 0, _types.TokenType._extends)) { + _expression.parseExprSubscripts.call(void 0, ); + hasSuper = true; + } else { + hasSuper = false; + } + if (_base.isTypeScriptEnabled) { + _typescript.tsAfterParseClassSuper.call(void 0, hasSuper); + } else if (_base.isFlowEnabled) { + _flow.flowAfterParseClassSuper.call(void 0, hasSuper); + } +} + +// Parses module export declaration. + + function parseExport() { + const exportIndex = _base.state.tokens.length - 1; + if (_base.isTypeScriptEnabled) { + if (_typescript.tsTryParseExport.call(void 0, )) { + return; + } + } + // export * from '...' + if (shouldParseExportStar()) { + parseExportStar(); + } else if (isExportDefaultSpecifier()) { + // export default from + _expression.parseIdentifier.call(void 0, ); + if (_tokenizer.match.call(void 0, _types.TokenType.comma) && _tokenizer.lookaheadType.call(void 0, ) === _types.TokenType.star) { + _util.expect.call(void 0, _types.TokenType.comma); + _util.expect.call(void 0, _types.TokenType.star); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._as); + _expression.parseIdentifier.call(void 0, ); + } else { + parseExportSpecifiersMaybe(); + } + parseExportFrom(); + } else if (_tokenizer.eat.call(void 0, _types.TokenType._default)) { + // export default ... 
+ parseExportDefaultExpression(); + } else if (shouldParseExportDeclaration()) { + parseExportDeclaration(); + } else { + // export { x, y as z } [from '...'] + parseExportSpecifiers(); + parseExportFrom(); + } + _base.state.tokens[exportIndex].rhsEndIndex = _base.state.tokens.length; +} exports.parseExport = parseExport; + +function parseExportDefaultExpression() { + if (_base.isTypeScriptEnabled) { + if (_typescript.tsTryParseExportDefaultExpression.call(void 0, )) { + return; + } + } + const functionStart = _base.state.start; + if (_tokenizer.eat.call(void 0, _types.TokenType._function)) { + parseFunction(functionStart, true, true); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._async) && _tokenizer.lookaheadType.call(void 0, ) === _types.TokenType._function) { + // async function declaration + _util.eatContextual.call(void 0, _keywords.ContextualKeyword._async); + _tokenizer.eat.call(void 0, _types.TokenType._function); + parseFunction(functionStart, true, true); + } else if (_tokenizer.match.call(void 0, _types.TokenType._class)) { + parseClass(true, true); + } else if (_tokenizer.match.call(void 0, _types.TokenType.at)) { + parseDecorators(); + parseClass(true, true); + } else { + _expression.parseMaybeAssign.call(void 0, ); + _util.semicolon.call(void 0, ); + } +} + +function parseExportDeclaration() { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseExportDeclaration.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowParseExportDeclaration.call(void 0, ); + } else { + parseStatement(true); + } +} + +function isExportDefaultSpecifier() { + if (_base.isTypeScriptEnabled && _typescript.tsIsDeclarationStart.call(void 0, )) { + return false; + } else if (_base.isFlowEnabled && _flow.flowShouldDisallowExportDefaultSpecifier.call(void 0, )) { + return false; + } + if (_tokenizer.match.call(void 0, _types.TokenType.name)) { + return _base.state.contextualKeyword !== _keywords.ContextualKeyword._async; + } + + if (!_tokenizer.match.call(void 0, _types.TokenType._default)) { + return false; + } + + const lookahead = _tokenizer.lookaheadTypeAndKeyword.call(void 0, ); + return ( + lookahead.type === _types.TokenType.comma || + (lookahead.type === _types.TokenType.name && lookahead.contextualKeyword === _keywords.ContextualKeyword._from) + ); +} + +function parseExportSpecifiersMaybe() { + if (_tokenizer.eat.call(void 0, _types.TokenType.comma)) { + parseExportSpecifiers(); + } +} + + function parseExportFrom() { + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._from)) { + _expression.parseExprAtom.call(void 0, ); + } + _util.semicolon.call(void 0, ); +} exports.parseExportFrom = parseExportFrom; + +function shouldParseExportStar() { + if (_base.isFlowEnabled) { + return _flow.flowShouldParseExportStar.call(void 0, ); + } else { + return _tokenizer.match.call(void 0, _types.TokenType.star); + } +} + +function parseExportStar() { + if (_base.isFlowEnabled) { + _flow.flowParseExportStar.call(void 0, ); + } else { + baseParseExportStar(); + } +} + + function baseParseExportStar() { + _util.expect.call(void 0, _types.TokenType.star); + + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._as)) { + parseExportNamespace(); + } else { + parseExportFrom(); + } +} exports.baseParseExportStar = baseParseExportStar; + +function parseExportNamespace() { + _tokenizer.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._as; + _expression.parseIdentifier.call(void 0, ); + 
parseExportSpecifiersMaybe(); + parseExportFrom(); +} + +function shouldParseExportDeclaration() { + return ( + (_base.isTypeScriptEnabled && _typescript.tsIsDeclarationStart.call(void 0, )) || + (_base.isFlowEnabled && _flow.flowShouldParseExportDeclaration.call(void 0, )) || + _base.state.type === _types.TokenType._var || + _base.state.type === _types.TokenType._const || + _base.state.type === _types.TokenType._let || + _base.state.type === _types.TokenType._function || + _base.state.type === _types.TokenType._class || + _util.isContextual.call(void 0, _keywords.ContextualKeyword._async) || + _tokenizer.match.call(void 0, _types.TokenType.at) + ); +} + +// Parses a comma-separated list of module exports. + function parseExportSpecifiers() { + let first = true; + + // export { x, y as z } [from '...'] + _util.expect.call(void 0, _types.TokenType.braceL); + + while (!_tokenizer.eat.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types.TokenType.comma); + if (_tokenizer.eat.call(void 0, _types.TokenType.braceR)) { + break; + } + } + + _expression.parseIdentifier.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _tokenizer.IdentifierRole.ExportAccess; + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._as)) { + _expression.parseIdentifier.call(void 0, ); + } + } +} exports.parseExportSpecifiers = parseExportSpecifiers; + +// Parses import declaration. + + function parseImport() { + if (_base.isTypeScriptEnabled && _tokenizer.match.call(void 0, _types.TokenType.name) && _tokenizer.lookaheadType.call(void 0, ) === _types.TokenType.eq) { + _typescript.tsParseImportEqualsDeclaration.call(void 0, ); + return; + } + + // import '...' + if (_tokenizer.match.call(void 0, _types.TokenType.string)) { + _expression.parseExprAtom.call(void 0, ); + } else { + parseImportSpecifiers(); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._from); + _expression.parseExprAtom.call(void 0, ); + } + _util.semicolon.call(void 0, ); +} exports.parseImport = parseImport; + +// eslint-disable-next-line no-unused-vars +function shouldParseDefaultImport() { + return _tokenizer.match.call(void 0, _types.TokenType.name); +} + +function parseImportSpecifierLocal() { + _lval.parseImportedIdentifier.call(void 0, ); +} + +// Parses a comma-separated list of module imports. +function parseImportSpecifiers() { + if (_base.isFlowEnabled) { + _flow.flowStartParseImportSpecifiers.call(void 0, ); + } + + let first = true; + if (shouldParseDefaultImport()) { + // import defaultObj, { x, y as z } from '...' + parseImportSpecifierLocal(); + + if (!_tokenizer.eat.call(void 0, _types.TokenType.comma)) return; + } + + if (_tokenizer.match.call(void 0, _types.TokenType.star)) { + _tokenizer.next.call(void 0, ); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._as); + + parseImportSpecifierLocal(); + + return; + } + + _util.expect.call(void 0, _types.TokenType.braceL); + while (!_tokenizer.eat.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + if (first) { + first = false; + } else { + // Detect an attempt to deep destructure + if (_tokenizer.eat.call(void 0, _types.TokenType.colon)) { + _util.unexpected.call(void 0, + "ES2015 named imports do not destructure. 
Use another statement for destructuring after the import.", + ); + } + + _util.expect.call(void 0, _types.TokenType.comma); + if (_tokenizer.eat.call(void 0, _types.TokenType.braceR)) { + break; + } + } + + parseImportSpecifier(); + } +} + +function parseImportSpecifier() { + if (_base.isFlowEnabled) { + _flow.flowParseImportSpecifier.call(void 0, ); + return; + } + _lval.parseImportedIdentifier.call(void 0, ); + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._as)) { + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _tokenizer.IdentifierRole.ImportAccess; + _tokenizer.next.call(void 0, ); + _lval.parseImportedIdentifier.call(void 0, ); + } +} diff --git a/node_modules/sucrase/dist/parser/traverser/statement.mjs b/node_modules/sucrase/dist/parser/traverser/statement.mjs new file mode 100644 index 00000000..a1f08089 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/statement.mjs @@ -0,0 +1,1115 @@ +/* eslint max-len: 0 */ + +import {File} from "../index"; +import { + flowAfterParseClassSuper, + flowAfterParseVarHead, + flowParseExportDeclaration, + flowParseExportStar, + flowParseIdentifierStatement, + flowParseImportSpecifier, + flowParseTypeAnnotation, + flowParseTypeParameterDeclaration, + flowShouldDisallowExportDefaultSpecifier, + flowShouldParseExportDeclaration, + flowShouldParseExportStar, + flowStartParseFunctionParams, + flowStartParseImportSpecifiers, + flowTryParseStatement, +} from "../plugins/flow"; +import { + tsAfterParseClassSuper, + tsAfterParseVarHead, + tsIsDeclarationStart, + tsParseAccessModifier, + tsParseExportDeclaration, + tsParseIdentifierStatement, + tsParseImportEqualsDeclaration, + tsParseMaybeDecoratorArguments, + tsStartParseFunctionParams, + tsTryParseClassMemberWithIsStatic, + tsTryParseExport, + tsTryParseExportDefaultExpression, + tsTryParseStatementContent, + tsTryParseTypeAnnotation, + tsTryParseTypeParameters, +} from "../plugins/typescript"; +import { + eat, + IdentifierRole, + lookaheadType, + lookaheadTypeAndKeyword, + match, + next, + popTypeContext, + pushTypeContext, +} from "../tokenizer"; +import {ContextualKeyword} from "../tokenizer/keywords"; +import {Scope} from "../tokenizer/state"; +import { TokenType as tt} from "../tokenizer/types"; +import {getNextContextId, isFlowEnabled, isTypeScriptEnabled, state} from "./base"; +import { + parseCallExpressionArguments, + parseExprAtom, + parseExpression, + parseExprSubscripts, + parseFunctionBodyAndFinish, + parseIdentifier, + parseMaybeAssign, + parseMethod, + parseParenExpression, + parsePropertyName, +} from "./expression"; +import { + parseBindingAtom, + parseBindingIdentifier, + parseBindingList, + parseImportedIdentifier, +} from "./lval"; +import { + canInsertSemicolon, + eatContextual, + expect, + expectContextual, + isContextual, + isLineTerminator, + semicolon, + unexpected, +} from "./util"; + +export function parseTopLevel() { + parseBlockBody(tt.eof); + state.scopes.push(new Scope(0, state.tokens.length, true)); + if (state.scopeDepth !== 0) { + throw new Error(`Invalid scope depth at end of file: ${state.scopeDepth}`); + } + return new File(state.tokens, state.scopes); +} + +// Parse a single statement. +// +// If expecting a statement and finding a slash operator, parse a +// regular expression literal. This is to handle cases like +// `if (foo) /blah/.exec(foo)`, where looking at the previous token +// does not help. 
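
The slash ambiguity that this comment refers to is plain JavaScript behavior; a small illustration (the identifiers are invented for the example, not part of the vendored file):

    const foo = "blah";
    // Statement position: after "if (...)" a leading slash starts a regex literal.
    if (foo) /blah/.exec(foo);
    // Expression position: after a parenthesized operand the same slash is division.
    const half = (foo.length) / 2;

In both lines the token just before the "/" is a closing parenthesis, so only the parser's notion of statement versus expression position, not the previous token, decides how the slash is read.
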
+ +export function parseStatement(declaration) { + if (isFlowEnabled) { + if (flowTryParseStatement()) { + return; + } + } + if (match(tt.at)) { + parseDecorators(); + } + parseStatementContent(declaration); +} + +function parseStatementContent(declaration) { + if (isTypeScriptEnabled) { + if (tsTryParseStatementContent()) { + return; + } + } + + const starttype = state.type; + + // Most types of statements are recognized by the keyword they + // start with. Many are trivial to parse, some require a bit of + // complexity. + + switch (starttype) { + case tt._break: + case tt._continue: + parseBreakContinueStatement(); + return; + case tt._debugger: + parseDebuggerStatement(); + return; + case tt._do: + parseDoStatement(); + return; + case tt._for: + parseForStatement(); + return; + case tt._function: + if (lookaheadType() === tt.dot) break; + if (!declaration) unexpected(); + parseFunctionStatement(); + return; + + case tt._class: + if (!declaration) unexpected(); + parseClass(true); + return; + + case tt._if: + parseIfStatement(); + return; + case tt._return: + parseReturnStatement(); + return; + case tt._switch: + parseSwitchStatement(); + return; + case tt._throw: + parseThrowStatement(); + return; + case tt._try: + parseTryStatement(); + return; + + case tt._let: + case tt._const: + if (!declaration) unexpected(); // NOTE: falls through to _var + + case tt._var: + parseVarStatement(starttype); + return; + + case tt._while: + parseWhileStatement(); + return; + case tt.braceL: + parseBlock(); + return; + case tt.semi: + parseEmptyStatement(); + return; + case tt._export: + case tt._import: { + const nextType = lookaheadType(); + if (nextType === tt.parenL || nextType === tt.dot) { + break; + } + next(); + if (starttype === tt._import) { + parseImport(); + } else { + parseExport(); + } + return; + } + case tt.name: + if (state.contextualKeyword === ContextualKeyword._async) { + const functionStart = state.start; + // peek ahead and see if next token is a function + const snapshot = state.snapshot(); + next(); + if (match(tt._function) && !canInsertSemicolon()) { + expect(tt._function); + parseFunction(functionStart, true); + return; + } else { + state.restoreFromSnapshot(snapshot); + } + } + default: + // Do nothing. + break; + } + + // If the statement does not start with a statement keyword or a + // brace, it's an ExpressionStatement or LabeledStatement. We + // simply start parsing an expression, and afterwards, if the + // next token is a colon and the expression was a simple + // Identifier node, we switch to interpreting it as a label. + const initialTokensLength = state.tokens.length; + parseExpression(); + let simpleName = null; + if (state.tokens.length === initialTokensLength + 1) { + const token = state.tokens[state.tokens.length - 1]; + if (token.type === tt.name) { + simpleName = token.contextualKeyword; + } + } + if (simpleName == null) { + semicolon(); + return; + } + if (eat(tt.colon)) { + parseLabeledStatement(); + } else { + // This was an identifier, so we might want to handle flow/typescript-specific cases. 
+ parseIdentifierStatement(simpleName); + } +} + +export function parseDecorators() { + while (match(tt.at)) { + parseDecorator(); + } +} + +function parseDecorator() { + next(); + if (eat(tt.parenL)) { + parseExpression(); + expect(tt.parenR); + } else { + parseIdentifier(); + while (eat(tt.dot)) { + parseIdentifier(); + } + } + parseMaybeDecoratorArguments(); +} + +function parseMaybeDecoratorArguments() { + if (isTypeScriptEnabled) { + tsParseMaybeDecoratorArguments(); + } else { + baseParseMaybeDecoratorArguments(); + } +} + +export function baseParseMaybeDecoratorArguments() { + if (eat(tt.parenL)) { + parseCallExpressionArguments(); + } +} + +function parseBreakContinueStatement() { + next(); + if (!isLineTerminator()) { + parseIdentifier(); + semicolon(); + } +} + +function parseDebuggerStatement() { + next(); + semicolon(); +} + +function parseDoStatement() { + next(); + parseStatement(false); + expect(tt._while); + parseParenExpression(); + eat(tt.semi); +} + +function parseForStatement() { + state.scopeDepth++; + const startTokenIndex = state.tokens.length; + parseAmbiguousForStatement(); + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, false)); + state.scopeDepth--; +} + +// Disambiguating between a `for` and a `for`/`in` or `for`/`of` +// loop is non-trivial. Basically, we have to parse the init `var` +// statement or expression, disallowing the `in` operator (see +// the second parameter to `parseExpression`), and then check +// whether the next token is `in` or `of`. When there is no init +// part (semicolon immediately after the opening parenthesis), it +// is a regular `for` loop. +function parseAmbiguousForStatement() { + next(); + + let forAwait = false; + if (isContextual(ContextualKeyword._await)) { + forAwait = true; + next(); + } + expect(tt.parenL); + + if (match(tt.semi)) { + if (forAwait) { + unexpected(); + } + parseFor(); + return; + } + + if (match(tt._var) || match(tt._let) || match(tt._const)) { + const varKind = state.type; + next(); + parseVar(true, varKind); + if (match(tt._in) || isContextual(ContextualKeyword._of)) { + parseForIn(forAwait); + return; + } + parseFor(); + return; + } + + parseExpression(true); + if (match(tt._in) || isContextual(ContextualKeyword._of)) { + parseForIn(forAwait); + return; + } + if (forAwait) { + unexpected(); + } + parseFor(); +} + +function parseFunctionStatement() { + const functionStart = state.start; + next(); + parseFunction(functionStart, true); +} + +function parseIfStatement() { + next(); + parseParenExpression(); + parseStatement(false); + if (eat(tt._else)) { + parseStatement(false); + } +} + +function parseReturnStatement() { + next(); + + // In `return` (and `break`/`continue`), the keywords with + // optional arguments, we eagerly look for a semicolon or the + // possibility to insert one. + + if (!isLineTerminator()) { + parseExpression(); + semicolon(); + } +} + +function parseSwitchStatement() { + next(); + parseParenExpression(); + state.scopeDepth++; + const startTokenIndex = state.tokens.length; + expect(tt.braceL); + + // Don't bother validation; just go through any sequence of cases, defaults, and statements. 
+ while (!match(tt.braceR) && !state.error) { + if (match(tt._case) || match(tt._default)) { + const isCase = match(tt._case); + next(); + if (isCase) { + parseExpression(); + } + expect(tt.colon); + } else { + parseStatement(true); + } + } + next(); // Closing brace + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, false)); + state.scopeDepth--; +} + +function parseThrowStatement() { + next(); + parseExpression(); + semicolon(); +} + +function parseTryStatement() { + next(); + + parseBlock(); + + if (match(tt._catch)) { + next(); + let catchBindingStartTokenIndex = null; + if (match(tt.parenL)) { + state.scopeDepth++; + catchBindingStartTokenIndex = state.tokens.length; + expect(tt.parenL); + parseBindingAtom(true /* isBlockScope */); + expect(tt.parenR); + } + parseBlock(); + if (catchBindingStartTokenIndex != null) { + // We need a special scope for the catch binding which includes the binding itself and the + // catch block. + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(catchBindingStartTokenIndex, endTokenIndex, false)); + state.scopeDepth--; + } + } + if (eat(tt._finally)) { + parseBlock(); + } +} + +export function parseVarStatement(kind) { + next(); + parseVar(false, kind); + semicolon(); +} + +function parseWhileStatement() { + next(); + parseParenExpression(); + parseStatement(false); +} + +function parseEmptyStatement() { + next(); +} + +function parseLabeledStatement() { + parseStatement(true); +} + +/** + * Parse a statement starting with an identifier of the given name. Subclasses match on the name + * to handle statements like "declare". + */ +function parseIdentifierStatement(contextualKeyword) { + if (isTypeScriptEnabled) { + tsParseIdentifierStatement(contextualKeyword); + } else if (isFlowEnabled) { + flowParseIdentifierStatement(contextualKeyword); + } else { + semicolon(); + } +} + +// Parse a semicolon-enclosed block of statements, handling `"use +// strict"` declarations when `allowStrict` is true (used for +// function bodies). + +export function parseBlock( + allowDirectives = false, + isFunctionScope = false, + contextId = 0, +) { + const startTokenIndex = state.tokens.length; + state.scopeDepth++; + expect(tt.braceL); + if (contextId) { + state.tokens[state.tokens.length - 1].contextId = contextId; + } + parseBlockBody(tt.braceR); + if (contextId) { + state.tokens[state.tokens.length - 1].contextId = contextId; + } + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, isFunctionScope)); + state.scopeDepth--; +} + +export function parseBlockBody(end) { + while (!eat(end) && !state.error) { + parseStatement(true); + } +} + +// Parse a regular `for` loop. The disambiguation code in +// `parseStatement` will already have parsed the init statement or +// expression. + +function parseFor() { + expect(tt.semi); + if (!match(tt.semi)) { + parseExpression(); + } + expect(tt.semi); + if (!match(tt.parenR)) { + parseExpression(); + } + expect(tt.parenR); + parseStatement(false); +} + +// Parse a `for`/`in` and `for`/`of` loop, which are almost +// same from parser's perspective. + +function parseForIn(forAwait) { + if (forAwait) { + eatContextual(ContextualKeyword._of); + } else { + next(); + } + parseExpression(); + expect(tt.parenR); + parseStatement(false); +} + +// Parse a list of variable declarations. 
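
parseVar below stamps each "=" token with rhsEndIndex, the index one past the last token of the initializer, so later transform passes can locate the right-hand side in the flat token array without re-parsing. A rough sketch of that bookkeeping (tokens and parseInitializer are placeholder names for this illustration, not sucrase's API):

    function recordInitializerEnd(tokens, parseInitializer) {
      const eqIndex = tokens.length - 1;            // the "=" token was just consumed
      parseInitializer();                           // appends the initializer's tokens
      tokens[eqIndex].rhsEndIndex = tokens.length;  // exclusive end of the right-hand side
    }

For a declaration like "let x = a + b, y;", the tokens from eqIndex + 1 up to (but not including) rhsEndIndex then cover exactly "a + b".
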
+ +function parseVar(isFor, kind) { + while (true) { + const isBlockScope = kind === tt._const || kind === tt._let; + parseVarHead(isBlockScope); + if (eat(tt.eq)) { + const eqIndex = state.tokens.length - 1; + parseMaybeAssign(isFor); + state.tokens[eqIndex].rhsEndIndex = state.tokens.length; + } + if (!eat(tt.comma)) { + break; + } + } +} + +function parseVarHead(isBlockScope) { + parseBindingAtom(isBlockScope); + if (isTypeScriptEnabled) { + tsAfterParseVarHead(); + } else if (isFlowEnabled) { + flowAfterParseVarHead(); + } +} + +// Parse a function declaration or literal (depending on the +// `isStatement` parameter). + +export function parseFunction( + functionStart, + isStatement, + optionalId = false, +) { + if (match(tt.star)) { + next(); + } + + if (isStatement && !optionalId && !match(tt.name) && !match(tt._yield)) { + unexpected(); + } + + let nameScopeStartTokenIndex = null; + + if (match(tt.name)) { + // Expression-style functions should limit their name's scope to the function body, so we make + // a new function scope to enforce that. + if (!isStatement) { + nameScopeStartTokenIndex = state.tokens.length; + state.scopeDepth++; + } + parseBindingIdentifier(false); + } + + const startTokenIndex = state.tokens.length; + state.scopeDepth++; + parseFunctionParams(); + parseFunctionBodyAndFinish(functionStart); + const endTokenIndex = state.tokens.length; + // In addition to the block scope of the function body, we need a separate function-style scope + // that includes the params. + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, true)); + state.scopeDepth--; + if (nameScopeStartTokenIndex !== null) { + state.scopes.push(new Scope(nameScopeStartTokenIndex, endTokenIndex, true)); + state.scopeDepth--; + } +} + +export function parseFunctionParams( + allowModifiers = false, + funcContextId = 0, +) { + if (isTypeScriptEnabled) { + tsStartParseFunctionParams(); + } else if (isFlowEnabled) { + flowStartParseFunctionParams(); + } + + expect(tt.parenL); + if (funcContextId) { + state.tokens[state.tokens.length - 1].contextId = funcContextId; + } + parseBindingList(tt.parenR, false /* isBlockScope */, false /* allowEmpty */, allowModifiers); + if (funcContextId) { + state.tokens[state.tokens.length - 1].contextId = funcContextId; + } +} + +// Parse a class declaration or literal (depending on the +// `isStatement` parameter). + +export function parseClass(isStatement, optionalId = false) { + // Put a context ID on the class keyword, the open-brace, and the close-brace, so that later + // code can easily navigate to meaningful points on the class. + const contextId = getNextContextId(); + + next(); + state.tokens[state.tokens.length - 1].contextId = contextId; + state.tokens[state.tokens.length - 1].isExpression = !isStatement; + // Like with functions, we declare a special "name scope" from the start of the name to the end + // of the class, but only with expression-style classes, to represent the fact that the name is + // available to the body of the class but not an outer declaration. 
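
The expression-style name scope described in the comment above mirrors standard JavaScript semantics; a short illustration (Registry and Impl are invented names for the example):

    // Class expression: the binding "Impl" is visible inside the class body...
    const Registry = class Impl {
      static create() { return new Impl(); }   // OK: Impl resolves to the class itself
    };
    Registry.create();
    // ...but not in the enclosing scope:
    // new Impl();   // ReferenceError: Impl is not defined
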
+ let nameScopeStartTokenIndex = null; + if (!isStatement) { + nameScopeStartTokenIndex = state.tokens.length; + state.scopeDepth++; + } + parseClassId(isStatement, optionalId); + parseClassSuper(); + const openBraceIndex = state.tokens.length; + parseClassBody(contextId); + if (state.error) { + return; + } + state.tokens[openBraceIndex].contextId = contextId; + state.tokens[state.tokens.length - 1].contextId = contextId; + if (nameScopeStartTokenIndex !== null) { + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(nameScopeStartTokenIndex, endTokenIndex, false)); + state.scopeDepth--; + } +} + +function isClassProperty() { + return match(tt.eq) || match(tt.semi) || match(tt.braceR) || match(tt.bang) || match(tt.colon); +} + +function isClassMethod() { + return match(tt.parenL) || match(tt.lessThan); +} + +function parseClassBody(classContextId) { + expect(tt.braceL); + + while (!eat(tt.braceR) && !state.error) { + if (eat(tt.semi)) { + continue; + } + + if (match(tt.at)) { + parseDecorator(); + continue; + } + const memberStart = state.start; + parseClassMember(memberStart, classContextId); + } +} + +function parseClassMember(memberStart, classContextId) { + if (isTypeScriptEnabled) { + tsParseAccessModifier(); + } + let isStatic = false; + if (match(tt.name) && state.contextualKeyword === ContextualKeyword._static) { + parseIdentifier(); // eats 'static' + if (isClassMethod()) { + parseClassMethod(memberStart, /* isConstructor */ false); + return; + } else if (isClassProperty()) { + parseClassProperty(); + return; + } + // otherwise something static + state.tokens[state.tokens.length - 1].type = tt._static; + isStatic = true; + } + + parseClassMemberWithIsStatic(memberStart, isStatic, classContextId); +} + +function parseClassMemberWithIsStatic( + memberStart, + isStatic, + classContextId, +) { + if (isTypeScriptEnabled) { + if (tsTryParseClassMemberWithIsStatic(isStatic, classContextId)) { + return; + } + } + if (eat(tt.star)) { + // a generator + parseClassPropertyName(classContextId); + parseClassMethod(memberStart, /* isConstructor */ false); + return; + } + + // Get the identifier name so we can tell if it's actually a keyword like "async", "get", or + // "set". + parseClassPropertyName(classContextId); + let isConstructor = false; + const token = state.tokens[state.tokens.length - 1]; + // We allow "constructor" as either an identifier or a string. + if (token.contextualKeyword === ContextualKeyword._constructor) { + isConstructor = true; + } + parsePostMemberNameModifiers(); + + if (isClassMethod()) { + parseClassMethod(memberStart, isConstructor); + } else if (isClassProperty()) { + parseClassProperty(); + } else if (token.contextualKeyword === ContextualKeyword._async && !isLineTerminator()) { + state.tokens[state.tokens.length - 1].type = tt._async; + // an async method + const isGenerator = match(tt.star); + if (isGenerator) { + next(); + } + + // The so-called parsed name would have been "async": get the real name. + parseClassPropertyName(classContextId); + parseClassMethod(memberStart, false /* isConstructor */); + } else if ( + (token.contextualKeyword === ContextualKeyword._get || + token.contextualKeyword === ContextualKeyword._set) && + !(isLineTerminator() && match(tt.star)) + ) { + if (token.contextualKeyword === ContextualKeyword._get) { + state.tokens[state.tokens.length - 1].type = tt._get; + } else { + state.tokens[state.tokens.length - 1].type = tt._set; + } + // `get\n*` is an uninitialized property named 'get' followed by a generator. 
+ // a getter or setter + // The so-called parsed name would have been "get/set": get the real name. + parseClassPropertyName(classContextId); + parseClassMethod(memberStart, /* isConstructor */ false); + } else if (isLineTerminator()) { + // an uninitialized class property (due to ASI, since we don't otherwise recognize the next token) + parseClassProperty(); + } else { + unexpected(); + } +} + +function parseClassMethod(functionStart, isConstructor) { + if (isTypeScriptEnabled) { + tsTryParseTypeParameters(); + } else if (isFlowEnabled) { + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + } + parseMethod(functionStart, isConstructor); +} + +// Return the name of the class property, if it is a simple identifier. +export function parseClassPropertyName(classContextId) { + parsePropertyName(classContextId); +} + +export function parsePostMemberNameModifiers() { + if (isTypeScriptEnabled) { + const oldIsType = pushTypeContext(0); + eat(tt.question); + popTypeContext(oldIsType); + } +} + +export function parseClassProperty() { + if (isTypeScriptEnabled) { + eat(tt.bang); + tsTryParseTypeAnnotation(); + } else if (isFlowEnabled) { + if (match(tt.colon)) { + flowParseTypeAnnotation(); + } + } + + if (match(tt.eq)) { + const equalsTokenIndex = state.tokens.length; + next(); + parseMaybeAssign(); + state.tokens[equalsTokenIndex].rhsEndIndex = state.tokens.length; + } + semicolon(); +} + +function parseClassId(isStatement, optionalId = false) { + if ( + isTypeScriptEnabled && + (!isStatement || optionalId) && + isContextual(ContextualKeyword._implements) + ) { + return; + } + + if (match(tt.name)) { + parseBindingIdentifier(true); + } + + if (isTypeScriptEnabled) { + tsTryParseTypeParameters(); + } else if (isFlowEnabled) { + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + } +} + +// Returns true if there was a superclass. +function parseClassSuper() { + let hasSuper = false; + if (eat(tt._extends)) { + parseExprSubscripts(); + hasSuper = true; + } else { + hasSuper = false; + } + if (isTypeScriptEnabled) { + tsAfterParseClassSuper(hasSuper); + } else if (isFlowEnabled) { + flowAfterParseClassSuper(hasSuper); + } +} + +// Parses module export declaration. + +export function parseExport() { + const exportIndex = state.tokens.length - 1; + if (isTypeScriptEnabled) { + if (tsTryParseExport()) { + return; + } + } + // export * from '...' + if (shouldParseExportStar()) { + parseExportStar(); + } else if (isExportDefaultSpecifier()) { + // export default from + parseIdentifier(); + if (match(tt.comma) && lookaheadType() === tt.star) { + expect(tt.comma); + expect(tt.star); + expectContextual(ContextualKeyword._as); + parseIdentifier(); + } else { + parseExportSpecifiersMaybe(); + } + parseExportFrom(); + } else if (eat(tt._default)) { + // export default ... 
+ parseExportDefaultExpression(); + } else if (shouldParseExportDeclaration()) { + parseExportDeclaration(); + } else { + // export { x, y as z } [from '...'] + parseExportSpecifiers(); + parseExportFrom(); + } + state.tokens[exportIndex].rhsEndIndex = state.tokens.length; +} + +function parseExportDefaultExpression() { + if (isTypeScriptEnabled) { + if (tsTryParseExportDefaultExpression()) { + return; + } + } + const functionStart = state.start; + if (eat(tt._function)) { + parseFunction(functionStart, true, true); + } else if (isContextual(ContextualKeyword._async) && lookaheadType() === tt._function) { + // async function declaration + eatContextual(ContextualKeyword._async); + eat(tt._function); + parseFunction(functionStart, true, true); + } else if (match(tt._class)) { + parseClass(true, true); + } else if (match(tt.at)) { + parseDecorators(); + parseClass(true, true); + } else { + parseMaybeAssign(); + semicolon(); + } +} + +function parseExportDeclaration() { + if (isTypeScriptEnabled) { + tsParseExportDeclaration(); + } else if (isFlowEnabled) { + flowParseExportDeclaration(); + } else { + parseStatement(true); + } +} + +function isExportDefaultSpecifier() { + if (isTypeScriptEnabled && tsIsDeclarationStart()) { + return false; + } else if (isFlowEnabled && flowShouldDisallowExportDefaultSpecifier()) { + return false; + } + if (match(tt.name)) { + return state.contextualKeyword !== ContextualKeyword._async; + } + + if (!match(tt._default)) { + return false; + } + + const lookahead = lookaheadTypeAndKeyword(); + return ( + lookahead.type === tt.comma || + (lookahead.type === tt.name && lookahead.contextualKeyword === ContextualKeyword._from) + ); +} + +function parseExportSpecifiersMaybe() { + if (eat(tt.comma)) { + parseExportSpecifiers(); + } +} + +export function parseExportFrom() { + if (eatContextual(ContextualKeyword._from)) { + parseExprAtom(); + } + semicolon(); +} + +function shouldParseExportStar() { + if (isFlowEnabled) { + return flowShouldParseExportStar(); + } else { + return match(tt.star); + } +} + +function parseExportStar() { + if (isFlowEnabled) { + flowParseExportStar(); + } else { + baseParseExportStar(); + } +} + +export function baseParseExportStar() { + expect(tt.star); + + if (isContextual(ContextualKeyword._as)) { + parseExportNamespace(); + } else { + parseExportFrom(); + } +} + +function parseExportNamespace() { + next(); + state.tokens[state.tokens.length - 1].type = tt._as; + parseIdentifier(); + parseExportSpecifiersMaybe(); + parseExportFrom(); +} + +function shouldParseExportDeclaration() { + return ( + (isTypeScriptEnabled && tsIsDeclarationStart()) || + (isFlowEnabled && flowShouldParseExportDeclaration()) || + state.type === tt._var || + state.type === tt._const || + state.type === tt._let || + state.type === tt._function || + state.type === tt._class || + isContextual(ContextualKeyword._async) || + match(tt.at) + ); +} + +// Parses a comma-separated list of module exports. +export function parseExportSpecifiers() { + let first = true; + + // export { x, y as z } [from '...'] + expect(tt.braceL); + + while (!eat(tt.braceR) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + if (eat(tt.braceR)) { + break; + } + } + + parseIdentifier(); + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ExportAccess; + if (eatContextual(ContextualKeyword._as)) { + parseIdentifier(); + } + } +} + +// Parses import declaration. 
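
parseImport and parseImportSpecifiers below handle the standard import forms; for reference, the shapes they distinguish look like this (the module specifiers are placeholders):

    import "./side-effect-only";                     // bare import: only the string atom is parsed
    import defaultObj from "./mod";                  // default import
    import * as ns from "./mod";                     // namespace import (the "* as" branch)
    import defaultObj2, { x, y as z } from "./mod";  // default import plus named specifiers
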
+ +export function parseImport() { + if (isTypeScriptEnabled && match(tt.name) && lookaheadType() === tt.eq) { + tsParseImportEqualsDeclaration(); + return; + } + + // import '...' + if (match(tt.string)) { + parseExprAtom(); + } else { + parseImportSpecifiers(); + expectContextual(ContextualKeyword._from); + parseExprAtom(); + } + semicolon(); +} + +// eslint-disable-next-line no-unused-vars +function shouldParseDefaultImport() { + return match(tt.name); +} + +function parseImportSpecifierLocal() { + parseImportedIdentifier(); +} + +// Parses a comma-separated list of module imports. +function parseImportSpecifiers() { + if (isFlowEnabled) { + flowStartParseImportSpecifiers(); + } + + let first = true; + if (shouldParseDefaultImport()) { + // import defaultObj, { x, y as z } from '...' + parseImportSpecifierLocal(); + + if (!eat(tt.comma)) return; + } + + if (match(tt.star)) { + next(); + expectContextual(ContextualKeyword._as); + + parseImportSpecifierLocal(); + + return; + } + + expect(tt.braceL); + while (!eat(tt.braceR) && !state.error) { + if (first) { + first = false; + } else { + // Detect an attempt to deep destructure + if (eat(tt.colon)) { + unexpected( + "ES2015 named imports do not destructure. Use another statement for destructuring after the import.", + ); + } + + expect(tt.comma); + if (eat(tt.braceR)) { + break; + } + } + + parseImportSpecifier(); + } +} + +function parseImportSpecifier() { + if (isFlowEnabled) { + flowParseImportSpecifier(); + return; + } + parseImportedIdentifier(); + if (isContextual(ContextualKeyword._as)) { + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ImportAccess; + next(); + parseImportedIdentifier(); + } +} diff --git a/node_modules/sucrase/dist/parser/traverser/util.d.ts b/node_modules/sucrase/dist/parser/traverser/util.d.ts new file mode 100644 index 00000000..8a40164a --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/util.d.ts @@ -0,0 +1,16 @@ +import { ContextualKeyword } from "../tokenizer/keywords"; +import { TokenType } from "../tokenizer/types"; +export declare function isContextual(contextualKeyword: ContextualKeyword): boolean; +export declare function isLookaheadContextual(contextualKeyword: ContextualKeyword): boolean; +export declare function eatContextual(contextualKeyword: ContextualKeyword): boolean; +export declare function expectContextual(contextualKeyword: ContextualKeyword): void; +export declare function canInsertSemicolon(): boolean; +export declare function hasPrecedingLineBreak(): boolean; +export declare function isLineTerminator(): boolean; +export declare function semicolon(): void; +export declare function expect(type: TokenType): void; +/** + * Transition the parser to an error state. All code needs to be written to naturally unwind in this + * state, which allows us to backtrack without exceptions and without error plumbing everywhere. 
+ */ +export declare function unexpected(message?: string, pos?: number): void; diff --git a/node_modules/sucrase/dist/parser/traverser/util.js b/node_modules/sucrase/dist/parser/traverser/util.js new file mode 100644 index 00000000..5460ab86 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/util.js @@ -0,0 +1,88 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _index = require('../tokenizer/index'); + +var _types = require('../tokenizer/types'); +var _charcodes = require('../util/charcodes'); +var _base = require('./base'); + +// ## Parser utilities + +// Tests whether parsed token is a contextual keyword. + function isContextual(contextualKeyword) { + return _base.state.contextualKeyword === contextualKeyword; +} exports.isContextual = isContextual; + + function isLookaheadContextual(contextualKeyword) { + const l = _index.lookaheadTypeAndKeyword.call(void 0, ); + return l.type === _types.TokenType.name && l.contextualKeyword === contextualKeyword; +} exports.isLookaheadContextual = isLookaheadContextual; + +// Consumes contextual keyword if possible. + function eatContextual(contextualKeyword) { + return _base.state.contextualKeyword === contextualKeyword && _index.eat.call(void 0, _types.TokenType.name); +} exports.eatContextual = eatContextual; + +// Asserts that following token is given contextual keyword. + function expectContextual(contextualKeyword) { + if (!eatContextual(contextualKeyword)) { + unexpected(); + } +} exports.expectContextual = expectContextual; + +// Test whether a semicolon can be inserted at the current position. + function canInsertSemicolon() { + return _index.match.call(void 0, _types.TokenType.eof) || _index.match.call(void 0, _types.TokenType.braceR) || hasPrecedingLineBreak(); +} exports.canInsertSemicolon = canInsertSemicolon; + + function hasPrecedingLineBreak() { + const prevToken = _base.state.tokens[_base.state.tokens.length - 1]; + const lastTokEnd = prevToken ? prevToken.end : 0; + for (let i = lastTokEnd; i < _base.state.start; i++) { + const code = _base.input.charCodeAt(i); + if ( + code === _charcodes.charCodes.lineFeed || + code === _charcodes.charCodes.carriageReturn || + code === 0x2028 || + code === 0x2029 + ) { + return true; + } + } + return false; +} exports.hasPrecedingLineBreak = hasPrecedingLineBreak; + + function isLineTerminator() { + return _index.eat.call(void 0, _types.TokenType.semi) || canInsertSemicolon(); +} exports.isLineTerminator = isLineTerminator; + +// Consume a semicolon, or, failing that, see if we are allowed to +// pretend that there is a semicolon at this position. + function semicolon() { + if (!isLineTerminator()) { + unexpected('Unexpected token, expected ";"'); + } +} exports.semicolon = semicolon; + +// Expect a token of a given type. If found, consume it, otherwise, +// raise an unexpected token error at given pos. + function expect(type) { + const matched = _index.eat.call(void 0, type); + if (!matched) { + unexpected(`Unexpected token, expected "${_types.formatTokenType.call(void 0, type)}"`); + } +} exports.expect = expect; + +/** + * Transition the parser to an error state. All code needs to be written to naturally unwind in this + * state, which allows us to backtrack without exceptions and without error plumbing everywhere. 
+ */ + function unexpected(message = "Unexpected token", pos = _base.state.start) { + if (_base.state.error) { + return; + } + // tslint:disable-next-line no-any + const err = new SyntaxError(message); + err.pos = pos; + _base.state.error = err; + _base.state.pos = _base.input.length; + _index.finishToken.call(void 0, _types.TokenType.eof); +} exports.unexpected = unexpected; diff --git a/node_modules/sucrase/dist/parser/traverser/util.mjs b/node_modules/sucrase/dist/parser/traverser/util.mjs new file mode 100644 index 00000000..74f8eeaf --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/util.mjs @@ -0,0 +1,88 @@ +import {eat, finishToken, lookaheadTypeAndKeyword, match} from "../tokenizer/index"; + +import {formatTokenType, TokenType as tt} from "../tokenizer/types"; +import {charCodes} from "../util/charcodes"; +import {input, state} from "./base"; + +// ## Parser utilities + +// Tests whether parsed token is a contextual keyword. +export function isContextual(contextualKeyword) { + return state.contextualKeyword === contextualKeyword; +} + +export function isLookaheadContextual(contextualKeyword) { + const l = lookaheadTypeAndKeyword(); + return l.type === tt.name && l.contextualKeyword === contextualKeyword; +} + +// Consumes contextual keyword if possible. +export function eatContextual(contextualKeyword) { + return state.contextualKeyword === contextualKeyword && eat(tt.name); +} + +// Asserts that following token is given contextual keyword. +export function expectContextual(contextualKeyword) { + if (!eatContextual(contextualKeyword)) { + unexpected(); + } +} + +// Test whether a semicolon can be inserted at the current position. +export function canInsertSemicolon() { + return match(tt.eof) || match(tt.braceR) || hasPrecedingLineBreak(); +} + +export function hasPrecedingLineBreak() { + const prevToken = state.tokens[state.tokens.length - 1]; + const lastTokEnd = prevToken ? prevToken.end : 0; + for (let i = lastTokEnd; i < state.start; i++) { + const code = input.charCodeAt(i); + if ( + code === charCodes.lineFeed || + code === charCodes.carriageReturn || + code === 0x2028 || + code === 0x2029 + ) { + return true; + } + } + return false; +} + +export function isLineTerminator() { + return eat(tt.semi) || canInsertSemicolon(); +} + +// Consume a semicolon, or, failing that, see if we are allowed to +// pretend that there is a semicolon at this position. +export function semicolon() { + if (!isLineTerminator()) { + unexpected('Unexpected token, expected ";"'); + } +} + +// Expect a token of a given type. If found, consume it, otherwise, +// raise an unexpected token error at given pos. +export function expect(type) { + const matched = eat(type); + if (!matched) { + unexpected(`Unexpected token, expected "${formatTokenType(type)}"`); + } +} + +/** + * Transition the parser to an error state. All code needs to be written to naturally unwind in this + * state, which allows us to backtrack without exceptions and without error plumbing everywhere. 
+ */ +export function unexpected(message = "Unexpected token", pos = state.start) { + if (state.error) { + return; + } + // tslint:disable-next-line no-any + const err = new SyntaxError(message); + err.pos = pos; + state.error = err; + state.pos = input.length; + finishToken(tt.eof); +} diff --git a/node_modules/sucrase/dist/parser/util/charcodes.d.ts b/node_modules/sucrase/dist/parser/util/charcodes.d.ts new file mode 100644 index 00000000..baa199e9 --- /dev/null +++ b/node_modules/sucrase/dist/parser/util/charcodes.d.ts @@ -0,0 +1,106 @@ +export declare enum charCodes { + backSpace = 8, + lineFeed = 10, + carriageReturn = 13, + shiftOut = 14, + space = 32, + exclamationMark = 33, + quotationMark = 34, + numberSign = 35, + dollarSign = 36, + percentSign = 37, + ampersand = 38, + apostrophe = 39, + leftParenthesis = 40, + rightParenthesis = 41, + asterisk = 42, + plusSign = 43, + comma = 44, + dash = 45, + dot = 46, + slash = 47, + digit0 = 48, + digit1 = 49, + digit2 = 50, + digit3 = 51, + digit4 = 52, + digit5 = 53, + digit6 = 54, + digit7 = 55, + digit8 = 56, + digit9 = 57, + colon = 58, + semicolon = 59, + lessThan = 60, + equalsTo = 61, + greaterThan = 62, + questionMark = 63, + atSign = 64, + uppercaseA = 65, + uppercaseB = 66, + uppercaseC = 67, + uppercaseD = 68, + uppercaseE = 69, + uppercaseF = 70, + uppercaseG = 71, + uppercaseH = 72, + uppercaseI = 73, + uppercaseJ = 74, + uppercaseK = 75, + uppercaseL = 76, + uppercaseM = 77, + uppercaseN = 78, + uppercaseO = 79, + uppercaseP = 80, + uppercaseQ = 81, + uppercaseR = 82, + uppercaseS = 83, + uppercaseT = 84, + uppercaseU = 85, + uppercaseV = 86, + uppercaseW = 87, + uppercaseX = 88, + uppercaseY = 89, + uppercaseZ = 90, + leftSquareBracket = 91, + backslash = 92, + rightSquareBracket = 93, + caret = 94, + underscore = 95, + graveAccent = 96, + lowercaseA = 97, + lowercaseB = 98, + lowercaseC = 99, + lowercaseD = 100, + lowercaseE = 101, + lowercaseF = 102, + lowercaseG = 103, + lowercaseH = 104, + lowercaseI = 105, + lowercaseJ = 106, + lowercaseK = 107, + lowercaseL = 108, + lowercaseM = 109, + lowercaseN = 110, + lowercaseO = 111, + lowercaseP = 112, + lowercaseQ = 113, + lowercaseR = 114, + lowercaseS = 115, + lowercaseT = 116, + lowercaseU = 117, + lowercaseV = 118, + lowercaseW = 119, + lowercaseX = 120, + lowercaseY = 121, + lowercaseZ = 122, + leftCurlyBrace = 123, + verticalBar = 124, + rightCurlyBrace = 125, + tilde = 126, + nonBreakingSpace = 160, + oghamSpaceMark = 5760, + lineSeparator = 8232, + paragraphSeparator = 8233 +} +export declare function isDigit(code: number): boolean; diff --git a/node_modules/sucrase/dist/parser/util/charcodes.js b/node_modules/sucrase/dist/parser/util/charcodes.js new file mode 100644 index 00000000..d3d90510 --- /dev/null +++ b/node_modules/sucrase/dist/parser/util/charcodes.js @@ -0,0 +1,114 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var charCodes; (function (charCodes) { + const backSpace = 8; charCodes[charCodes["backSpace"] = backSpace] = "backSpace"; + const lineFeed = 10; charCodes[charCodes["lineFeed"] = lineFeed] = "lineFeed"; // '\n' + const carriageReturn = 13; charCodes[charCodes["carriageReturn"] = carriageReturn] = "carriageReturn"; // '\r' + const shiftOut = 14; charCodes[charCodes["shiftOut"] = shiftOut] = "shiftOut"; + const space = 32; charCodes[charCodes["space"] = space] = "space"; + const exclamationMark = 33; charCodes[charCodes["exclamationMark"] = exclamationMark] = "exclamationMark"; // '!' 
+ const quotationMark = 34; charCodes[charCodes["quotationMark"] = quotationMark] = "quotationMark"; // '"' + const numberSign = 35; charCodes[charCodes["numberSign"] = numberSign] = "numberSign"; // '#' + const dollarSign = 36; charCodes[charCodes["dollarSign"] = dollarSign] = "dollarSign"; // '$' + const percentSign = 37; charCodes[charCodes["percentSign"] = percentSign] = "percentSign"; // '%' + const ampersand = 38; charCodes[charCodes["ampersand"] = ampersand] = "ampersand"; // '&' + const apostrophe = 39; charCodes[charCodes["apostrophe"] = apostrophe] = "apostrophe"; // ''' + const leftParenthesis = 40; charCodes[charCodes["leftParenthesis"] = leftParenthesis] = "leftParenthesis"; // '(' + const rightParenthesis = 41; charCodes[charCodes["rightParenthesis"] = rightParenthesis] = "rightParenthesis"; // ')' + const asterisk = 42; charCodes[charCodes["asterisk"] = asterisk] = "asterisk"; // '*' + const plusSign = 43; charCodes[charCodes["plusSign"] = plusSign] = "plusSign"; // '+' + const comma = 44; charCodes[charCodes["comma"] = comma] = "comma"; // ',' + const dash = 45; charCodes[charCodes["dash"] = dash] = "dash"; // '-' + const dot = 46; charCodes[charCodes["dot"] = dot] = "dot"; // '.' + const slash = 47; charCodes[charCodes["slash"] = slash] = "slash"; // '/' + const digit0 = 48; charCodes[charCodes["digit0"] = digit0] = "digit0"; // '0' + const digit1 = 49; charCodes[charCodes["digit1"] = digit1] = "digit1"; // '1' + const digit2 = 50; charCodes[charCodes["digit2"] = digit2] = "digit2"; // '2' + const digit3 = 51; charCodes[charCodes["digit3"] = digit3] = "digit3"; // '3' + const digit4 = 52; charCodes[charCodes["digit4"] = digit4] = "digit4"; // '4' + const digit5 = 53; charCodes[charCodes["digit5"] = digit5] = "digit5"; // '5' + const digit6 = 54; charCodes[charCodes["digit6"] = digit6] = "digit6"; // '6' + const digit7 = 55; charCodes[charCodes["digit7"] = digit7] = "digit7"; // '7' + const digit8 = 56; charCodes[charCodes["digit8"] = digit8] = "digit8"; // '8' + const digit9 = 57; charCodes[charCodes["digit9"] = digit9] = "digit9"; // '9' + const colon = 58; charCodes[charCodes["colon"] = colon] = "colon"; // ':' + const semicolon = 59; charCodes[charCodes["semicolon"] = semicolon] = "semicolon"; // ';' + const lessThan = 60; charCodes[charCodes["lessThan"] = lessThan] = "lessThan"; // '<' + const equalsTo = 61; charCodes[charCodes["equalsTo"] = equalsTo] = "equalsTo"; // '=' + const greaterThan = 62; charCodes[charCodes["greaterThan"] = greaterThan] = "greaterThan"; // '>' + const questionMark = 63; charCodes[charCodes["questionMark"] = questionMark] = "questionMark"; // '?' 
+ const atSign = 64; charCodes[charCodes["atSign"] = atSign] = "atSign"; // '@' + const uppercaseA = 65; charCodes[charCodes["uppercaseA"] = uppercaseA] = "uppercaseA"; // 'A' + const uppercaseB = 66; charCodes[charCodes["uppercaseB"] = uppercaseB] = "uppercaseB"; // 'B' + const uppercaseC = 67; charCodes[charCodes["uppercaseC"] = uppercaseC] = "uppercaseC"; // 'C' + const uppercaseD = 68; charCodes[charCodes["uppercaseD"] = uppercaseD] = "uppercaseD"; // 'D' + const uppercaseE = 69; charCodes[charCodes["uppercaseE"] = uppercaseE] = "uppercaseE"; // 'E' + const uppercaseF = 70; charCodes[charCodes["uppercaseF"] = uppercaseF] = "uppercaseF"; // 'F' + const uppercaseG = 71; charCodes[charCodes["uppercaseG"] = uppercaseG] = "uppercaseG"; // 'G' + const uppercaseH = 72; charCodes[charCodes["uppercaseH"] = uppercaseH] = "uppercaseH"; // 'H' + const uppercaseI = 73; charCodes[charCodes["uppercaseI"] = uppercaseI] = "uppercaseI"; // 'I' + const uppercaseJ = 74; charCodes[charCodes["uppercaseJ"] = uppercaseJ] = "uppercaseJ"; // 'J' + const uppercaseK = 75; charCodes[charCodes["uppercaseK"] = uppercaseK] = "uppercaseK"; // 'K' + const uppercaseL = 76; charCodes[charCodes["uppercaseL"] = uppercaseL] = "uppercaseL"; // 'L' + const uppercaseM = 77; charCodes[charCodes["uppercaseM"] = uppercaseM] = "uppercaseM"; // 'M' + const uppercaseN = 78; charCodes[charCodes["uppercaseN"] = uppercaseN] = "uppercaseN"; // 'N' + const uppercaseO = 79; charCodes[charCodes["uppercaseO"] = uppercaseO] = "uppercaseO"; // 'O' + const uppercaseP = 80; charCodes[charCodes["uppercaseP"] = uppercaseP] = "uppercaseP"; // 'P' + const uppercaseQ = 81; charCodes[charCodes["uppercaseQ"] = uppercaseQ] = "uppercaseQ"; // 'Q' + const uppercaseR = 82; charCodes[charCodes["uppercaseR"] = uppercaseR] = "uppercaseR"; // 'R' + const uppercaseS = 83; charCodes[charCodes["uppercaseS"] = uppercaseS] = "uppercaseS"; // 'S' + const uppercaseT = 84; charCodes[charCodes["uppercaseT"] = uppercaseT] = "uppercaseT"; // 'T' + const uppercaseU = 85; charCodes[charCodes["uppercaseU"] = uppercaseU] = "uppercaseU"; // 'U' + const uppercaseV = 86; charCodes[charCodes["uppercaseV"] = uppercaseV] = "uppercaseV"; // 'V' + const uppercaseW = 87; charCodes[charCodes["uppercaseW"] = uppercaseW] = "uppercaseW"; // 'W' + const uppercaseX = 88; charCodes[charCodes["uppercaseX"] = uppercaseX] = "uppercaseX"; // 'X' + const uppercaseY = 89; charCodes[charCodes["uppercaseY"] = uppercaseY] = "uppercaseY"; // 'Y' + const uppercaseZ = 90; charCodes[charCodes["uppercaseZ"] = uppercaseZ] = "uppercaseZ"; // 'Z' + const leftSquareBracket = 91; charCodes[charCodes["leftSquareBracket"] = leftSquareBracket] = "leftSquareBracket"; // '[' + const backslash = 92; charCodes[charCodes["backslash"] = backslash] = "backslash"; // '\ ' + const rightSquareBracket = 93; charCodes[charCodes["rightSquareBracket"] = rightSquareBracket] = "rightSquareBracket"; // ']' + const caret = 94; charCodes[charCodes["caret"] = caret] = "caret"; // '^' + const underscore = 95; charCodes[charCodes["underscore"] = underscore] = "underscore"; // '_' + const graveAccent = 96; charCodes[charCodes["graveAccent"] = graveAccent] = "graveAccent"; // '`' + const lowercaseA = 97; charCodes[charCodes["lowercaseA"] = lowercaseA] = "lowercaseA"; // 'a' + const lowercaseB = 98; charCodes[charCodes["lowercaseB"] = lowercaseB] = "lowercaseB"; // 'b' + const lowercaseC = 99; charCodes[charCodes["lowercaseC"] = lowercaseC] = "lowercaseC"; // 'c' + const lowercaseD = 100; charCodes[charCodes["lowercaseD"] = lowercaseD] 
= "lowercaseD"; // 'd' + const lowercaseE = 101; charCodes[charCodes["lowercaseE"] = lowercaseE] = "lowercaseE"; // 'e' + const lowercaseF = 102; charCodes[charCodes["lowercaseF"] = lowercaseF] = "lowercaseF"; // 'f' + const lowercaseG = 103; charCodes[charCodes["lowercaseG"] = lowercaseG] = "lowercaseG"; // 'g' + const lowercaseH = 104; charCodes[charCodes["lowercaseH"] = lowercaseH] = "lowercaseH"; // 'h' + const lowercaseI = 105; charCodes[charCodes["lowercaseI"] = lowercaseI] = "lowercaseI"; // 'i' + const lowercaseJ = 106; charCodes[charCodes["lowercaseJ"] = lowercaseJ] = "lowercaseJ"; // 'j' + const lowercaseK = 107; charCodes[charCodes["lowercaseK"] = lowercaseK] = "lowercaseK"; // 'k' + const lowercaseL = 108; charCodes[charCodes["lowercaseL"] = lowercaseL] = "lowercaseL"; // 'l' + const lowercaseM = 109; charCodes[charCodes["lowercaseM"] = lowercaseM] = "lowercaseM"; // 'm' + const lowercaseN = 110; charCodes[charCodes["lowercaseN"] = lowercaseN] = "lowercaseN"; // 'n' + const lowercaseO = 111; charCodes[charCodes["lowercaseO"] = lowercaseO] = "lowercaseO"; // 'o' + const lowercaseP = 112; charCodes[charCodes["lowercaseP"] = lowercaseP] = "lowercaseP"; // 'p' + const lowercaseQ = 113; charCodes[charCodes["lowercaseQ"] = lowercaseQ] = "lowercaseQ"; // 'q' + const lowercaseR = 114; charCodes[charCodes["lowercaseR"] = lowercaseR] = "lowercaseR"; // 'r' + const lowercaseS = 115; charCodes[charCodes["lowercaseS"] = lowercaseS] = "lowercaseS"; // 's' + const lowercaseT = 116; charCodes[charCodes["lowercaseT"] = lowercaseT] = "lowercaseT"; // 't' + const lowercaseU = 117; charCodes[charCodes["lowercaseU"] = lowercaseU] = "lowercaseU"; // 'u' + const lowercaseV = 118; charCodes[charCodes["lowercaseV"] = lowercaseV] = "lowercaseV"; // 'v' + const lowercaseW = 119; charCodes[charCodes["lowercaseW"] = lowercaseW] = "lowercaseW"; // 'w' + const lowercaseX = 120; charCodes[charCodes["lowercaseX"] = lowercaseX] = "lowercaseX"; // 'x' + const lowercaseY = 121; charCodes[charCodes["lowercaseY"] = lowercaseY] = "lowercaseY"; // 'y' + const lowercaseZ = 122; charCodes[charCodes["lowercaseZ"] = lowercaseZ] = "lowercaseZ"; // 'z' + const leftCurlyBrace = 123; charCodes[charCodes["leftCurlyBrace"] = leftCurlyBrace] = "leftCurlyBrace"; // '{' + const verticalBar = 124; charCodes[charCodes["verticalBar"] = verticalBar] = "verticalBar"; // '|' + const rightCurlyBrace = 125; charCodes[charCodes["rightCurlyBrace"] = rightCurlyBrace] = "rightCurlyBrace"; // '}' + const tilde = 126; charCodes[charCodes["tilde"] = tilde] = "tilde"; // '~' + const nonBreakingSpace = 160; charCodes[charCodes["nonBreakingSpace"] = nonBreakingSpace] = "nonBreakingSpace"; + // eslint-disable-next-line no-irregular-whitespace + const oghamSpaceMark = 5760; charCodes[charCodes["oghamSpaceMark"] = oghamSpaceMark] = "oghamSpaceMark"; // ' ' + const lineSeparator = 8232; charCodes[charCodes["lineSeparator"] = lineSeparator] = "lineSeparator"; + const paragraphSeparator = 8233; charCodes[charCodes["paragraphSeparator"] = paragraphSeparator] = "paragraphSeparator"; +})(charCodes || (exports.charCodes = charCodes = {})); + + function isDigit(code) { + return ( + (code >= charCodes.digit0 && code <= charCodes.digit9) || + (code >= charCodes.lowercaseA && code <= charCodes.lowercaseF) || + (code >= charCodes.uppercaseA && code <= charCodes.uppercaseF) + ); +} exports.isDigit = isDigit; diff --git a/node_modules/sucrase/dist/parser/util/charcodes.mjs b/node_modules/sucrase/dist/parser/util/charcodes.mjs new file mode 100644 index 
00000000..01a7909b --- /dev/null +++ b/node_modules/sucrase/dist/parser/util/charcodes.mjs @@ -0,0 +1,114 @@ +export var charCodes; (function (charCodes) { + const backSpace = 8; charCodes[charCodes["backSpace"] = backSpace] = "backSpace"; + const lineFeed = 10; charCodes[charCodes["lineFeed"] = lineFeed] = "lineFeed"; // '\n' + const carriageReturn = 13; charCodes[charCodes["carriageReturn"] = carriageReturn] = "carriageReturn"; // '\r' + const shiftOut = 14; charCodes[charCodes["shiftOut"] = shiftOut] = "shiftOut"; + const space = 32; charCodes[charCodes["space"] = space] = "space"; + const exclamationMark = 33; charCodes[charCodes["exclamationMark"] = exclamationMark] = "exclamationMark"; // '!' + const quotationMark = 34; charCodes[charCodes["quotationMark"] = quotationMark] = "quotationMark"; // '"' + const numberSign = 35; charCodes[charCodes["numberSign"] = numberSign] = "numberSign"; // '#' + const dollarSign = 36; charCodes[charCodes["dollarSign"] = dollarSign] = "dollarSign"; // '$' + const percentSign = 37; charCodes[charCodes["percentSign"] = percentSign] = "percentSign"; // '%' + const ampersand = 38; charCodes[charCodes["ampersand"] = ampersand] = "ampersand"; // '&' + const apostrophe = 39; charCodes[charCodes["apostrophe"] = apostrophe] = "apostrophe"; // ''' + const leftParenthesis = 40; charCodes[charCodes["leftParenthesis"] = leftParenthesis] = "leftParenthesis"; // '(' + const rightParenthesis = 41; charCodes[charCodes["rightParenthesis"] = rightParenthesis] = "rightParenthesis"; // ')' + const asterisk = 42; charCodes[charCodes["asterisk"] = asterisk] = "asterisk"; // '*' + const plusSign = 43; charCodes[charCodes["plusSign"] = plusSign] = "plusSign"; // '+' + const comma = 44; charCodes[charCodes["comma"] = comma] = "comma"; // ',' + const dash = 45; charCodes[charCodes["dash"] = dash] = "dash"; // '-' + const dot = 46; charCodes[charCodes["dot"] = dot] = "dot"; // '.' + const slash = 47; charCodes[charCodes["slash"] = slash] = "slash"; // '/' + const digit0 = 48; charCodes[charCodes["digit0"] = digit0] = "digit0"; // '0' + const digit1 = 49; charCodes[charCodes["digit1"] = digit1] = "digit1"; // '1' + const digit2 = 50; charCodes[charCodes["digit2"] = digit2] = "digit2"; // '2' + const digit3 = 51; charCodes[charCodes["digit3"] = digit3] = "digit3"; // '3' + const digit4 = 52; charCodes[charCodes["digit4"] = digit4] = "digit4"; // '4' + const digit5 = 53; charCodes[charCodes["digit5"] = digit5] = "digit5"; // '5' + const digit6 = 54; charCodes[charCodes["digit6"] = digit6] = "digit6"; // '6' + const digit7 = 55; charCodes[charCodes["digit7"] = digit7] = "digit7"; // '7' + const digit8 = 56; charCodes[charCodes["digit8"] = digit8] = "digit8"; // '8' + const digit9 = 57; charCodes[charCodes["digit9"] = digit9] = "digit9"; // '9' + const colon = 58; charCodes[charCodes["colon"] = colon] = "colon"; // ':' + const semicolon = 59; charCodes[charCodes["semicolon"] = semicolon] = "semicolon"; // ';' + const lessThan = 60; charCodes[charCodes["lessThan"] = lessThan] = "lessThan"; // '<' + const equalsTo = 61; charCodes[charCodes["equalsTo"] = equalsTo] = "equalsTo"; // '=' + const greaterThan = 62; charCodes[charCodes["greaterThan"] = greaterThan] = "greaterThan"; // '>' + const questionMark = 63; charCodes[charCodes["questionMark"] = questionMark] = "questionMark"; // '?' 
+ const atSign = 64; charCodes[charCodes["atSign"] = atSign] = "atSign"; // '@' + const uppercaseA = 65; charCodes[charCodes["uppercaseA"] = uppercaseA] = "uppercaseA"; // 'A' + const uppercaseB = 66; charCodes[charCodes["uppercaseB"] = uppercaseB] = "uppercaseB"; // 'B' + const uppercaseC = 67; charCodes[charCodes["uppercaseC"] = uppercaseC] = "uppercaseC"; // 'C' + const uppercaseD = 68; charCodes[charCodes["uppercaseD"] = uppercaseD] = "uppercaseD"; // 'D' + const uppercaseE = 69; charCodes[charCodes["uppercaseE"] = uppercaseE] = "uppercaseE"; // 'E' + const uppercaseF = 70; charCodes[charCodes["uppercaseF"] = uppercaseF] = "uppercaseF"; // 'F' + const uppercaseG = 71; charCodes[charCodes["uppercaseG"] = uppercaseG] = "uppercaseG"; // 'G' + const uppercaseH = 72; charCodes[charCodes["uppercaseH"] = uppercaseH] = "uppercaseH"; // 'H' + const uppercaseI = 73; charCodes[charCodes["uppercaseI"] = uppercaseI] = "uppercaseI"; // 'I' + const uppercaseJ = 74; charCodes[charCodes["uppercaseJ"] = uppercaseJ] = "uppercaseJ"; // 'J' + const uppercaseK = 75; charCodes[charCodes["uppercaseK"] = uppercaseK] = "uppercaseK"; // 'K' + const uppercaseL = 76; charCodes[charCodes["uppercaseL"] = uppercaseL] = "uppercaseL"; // 'L' + const uppercaseM = 77; charCodes[charCodes["uppercaseM"] = uppercaseM] = "uppercaseM"; // 'M' + const uppercaseN = 78; charCodes[charCodes["uppercaseN"] = uppercaseN] = "uppercaseN"; // 'N' + const uppercaseO = 79; charCodes[charCodes["uppercaseO"] = uppercaseO] = "uppercaseO"; // 'O' + const uppercaseP = 80; charCodes[charCodes["uppercaseP"] = uppercaseP] = "uppercaseP"; // 'P' + const uppercaseQ = 81; charCodes[charCodes["uppercaseQ"] = uppercaseQ] = "uppercaseQ"; // 'Q' + const uppercaseR = 82; charCodes[charCodes["uppercaseR"] = uppercaseR] = "uppercaseR"; // 'R' + const uppercaseS = 83; charCodes[charCodes["uppercaseS"] = uppercaseS] = "uppercaseS"; // 'S' + const uppercaseT = 84; charCodes[charCodes["uppercaseT"] = uppercaseT] = "uppercaseT"; // 'T' + const uppercaseU = 85; charCodes[charCodes["uppercaseU"] = uppercaseU] = "uppercaseU"; // 'U' + const uppercaseV = 86; charCodes[charCodes["uppercaseV"] = uppercaseV] = "uppercaseV"; // 'V' + const uppercaseW = 87; charCodes[charCodes["uppercaseW"] = uppercaseW] = "uppercaseW"; // 'W' + const uppercaseX = 88; charCodes[charCodes["uppercaseX"] = uppercaseX] = "uppercaseX"; // 'X' + const uppercaseY = 89; charCodes[charCodes["uppercaseY"] = uppercaseY] = "uppercaseY"; // 'Y' + const uppercaseZ = 90; charCodes[charCodes["uppercaseZ"] = uppercaseZ] = "uppercaseZ"; // 'Z' + const leftSquareBracket = 91; charCodes[charCodes["leftSquareBracket"] = leftSquareBracket] = "leftSquareBracket"; // '[' + const backslash = 92; charCodes[charCodes["backslash"] = backslash] = "backslash"; // '\ ' + const rightSquareBracket = 93; charCodes[charCodes["rightSquareBracket"] = rightSquareBracket] = "rightSquareBracket"; // ']' + const caret = 94; charCodes[charCodes["caret"] = caret] = "caret"; // '^' + const underscore = 95; charCodes[charCodes["underscore"] = underscore] = "underscore"; // '_' + const graveAccent = 96; charCodes[charCodes["graveAccent"] = graveAccent] = "graveAccent"; // '`' + const lowercaseA = 97; charCodes[charCodes["lowercaseA"] = lowercaseA] = "lowercaseA"; // 'a' + const lowercaseB = 98; charCodes[charCodes["lowercaseB"] = lowercaseB] = "lowercaseB"; // 'b' + const lowercaseC = 99; charCodes[charCodes["lowercaseC"] = lowercaseC] = "lowercaseC"; // 'c' + const lowercaseD = 100; charCodes[charCodes["lowercaseD"] = lowercaseD] 
= "lowercaseD"; // 'd' + const lowercaseE = 101; charCodes[charCodes["lowercaseE"] = lowercaseE] = "lowercaseE"; // 'e' + const lowercaseF = 102; charCodes[charCodes["lowercaseF"] = lowercaseF] = "lowercaseF"; // 'f' + const lowercaseG = 103; charCodes[charCodes["lowercaseG"] = lowercaseG] = "lowercaseG"; // 'g' + const lowercaseH = 104; charCodes[charCodes["lowercaseH"] = lowercaseH] = "lowercaseH"; // 'h' + const lowercaseI = 105; charCodes[charCodes["lowercaseI"] = lowercaseI] = "lowercaseI"; // 'i' + const lowercaseJ = 106; charCodes[charCodes["lowercaseJ"] = lowercaseJ] = "lowercaseJ"; // 'j' + const lowercaseK = 107; charCodes[charCodes["lowercaseK"] = lowercaseK] = "lowercaseK"; // 'k' + const lowercaseL = 108; charCodes[charCodes["lowercaseL"] = lowercaseL] = "lowercaseL"; // 'l' + const lowercaseM = 109; charCodes[charCodes["lowercaseM"] = lowercaseM] = "lowercaseM"; // 'm' + const lowercaseN = 110; charCodes[charCodes["lowercaseN"] = lowercaseN] = "lowercaseN"; // 'n' + const lowercaseO = 111; charCodes[charCodes["lowercaseO"] = lowercaseO] = "lowercaseO"; // 'o' + const lowercaseP = 112; charCodes[charCodes["lowercaseP"] = lowercaseP] = "lowercaseP"; // 'p' + const lowercaseQ = 113; charCodes[charCodes["lowercaseQ"] = lowercaseQ] = "lowercaseQ"; // 'q' + const lowercaseR = 114; charCodes[charCodes["lowercaseR"] = lowercaseR] = "lowercaseR"; // 'r' + const lowercaseS = 115; charCodes[charCodes["lowercaseS"] = lowercaseS] = "lowercaseS"; // 's' + const lowercaseT = 116; charCodes[charCodes["lowercaseT"] = lowercaseT] = "lowercaseT"; // 't' + const lowercaseU = 117; charCodes[charCodes["lowercaseU"] = lowercaseU] = "lowercaseU"; // 'u' + const lowercaseV = 118; charCodes[charCodes["lowercaseV"] = lowercaseV] = "lowercaseV"; // 'v' + const lowercaseW = 119; charCodes[charCodes["lowercaseW"] = lowercaseW] = "lowercaseW"; // 'w' + const lowercaseX = 120; charCodes[charCodes["lowercaseX"] = lowercaseX] = "lowercaseX"; // 'x' + const lowercaseY = 121; charCodes[charCodes["lowercaseY"] = lowercaseY] = "lowercaseY"; // 'y' + const lowercaseZ = 122; charCodes[charCodes["lowercaseZ"] = lowercaseZ] = "lowercaseZ"; // 'z' + const leftCurlyBrace = 123; charCodes[charCodes["leftCurlyBrace"] = leftCurlyBrace] = "leftCurlyBrace"; // '{' + const verticalBar = 124; charCodes[charCodes["verticalBar"] = verticalBar] = "verticalBar"; // '|' + const rightCurlyBrace = 125; charCodes[charCodes["rightCurlyBrace"] = rightCurlyBrace] = "rightCurlyBrace"; // '}' + const tilde = 126; charCodes[charCodes["tilde"] = tilde] = "tilde"; // '~' + const nonBreakingSpace = 160; charCodes[charCodes["nonBreakingSpace"] = nonBreakingSpace] = "nonBreakingSpace"; + // eslint-disable-next-line no-irregular-whitespace + const oghamSpaceMark = 5760; charCodes[charCodes["oghamSpaceMark"] = oghamSpaceMark] = "oghamSpaceMark"; // ' ' + const lineSeparator = 8232; charCodes[charCodes["lineSeparator"] = lineSeparator] = "lineSeparator"; + const paragraphSeparator = 8233; charCodes[charCodes["paragraphSeparator"] = paragraphSeparator] = "paragraphSeparator"; +})(charCodes || (charCodes = {})); + +export function isDigit(code) { + return ( + (code >= charCodes.digit0 && code <= charCodes.digit9) || + (code >= charCodes.lowercaseA && code <= charCodes.lowercaseF) || + (code >= charCodes.uppercaseA && code <= charCodes.uppercaseF) + ); +} diff --git a/node_modules/sucrase/dist/parser/util/identifier.d.ts b/node_modules/sucrase/dist/parser/util/identifier.d.ts new file mode 100644 index 00000000..5c2eaeb1 --- /dev/null +++ 
b/node_modules/sucrase/dist/parser/util/identifier.d.ts @@ -0,0 +1,2 @@ +export declare const IS_IDENTIFIER_CHAR: Uint8Array; +export declare const IS_IDENTIFIER_START: Uint8Array; diff --git a/node_modules/sucrase/dist/parser/util/identifier.js b/node_modules/sucrase/dist/parser/util/identifier.js new file mode 100644 index 00000000..9a2813c9 --- /dev/null +++ b/node_modules/sucrase/dist/parser/util/identifier.js @@ -0,0 +1,34 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _charcodes = require('./charcodes'); +var _whitespace = require('./whitespace'); + +function computeIsIdentifierChar(code) { + if (code < 48) return code === 36; + if (code < 58) return true; + if (code < 65) return false; + if (code < 91) return true; + if (code < 97) return code === 95; + if (code < 123) return true; + if (code < 128) return false; + throw new Error("Should not be called with non-ASCII char code."); +} + + const IS_IDENTIFIER_CHAR = new Uint8Array(65536); exports.IS_IDENTIFIER_CHAR = IS_IDENTIFIER_CHAR; +for (let i = 0; i < 128; i++) { + exports.IS_IDENTIFIER_CHAR[i] = computeIsIdentifierChar(i) ? 1 : 0; +} +for (let i = 128; i < 65536; i++) { + exports.IS_IDENTIFIER_CHAR[i] = 1; +} +// Aside from whitespace and newlines, all characters outside the ASCII space are either +// identifier characters or invalid. Since we're not performing code validation, we can just +// treat all invalid characters as identifier characters. +for (const whitespaceChar of _whitespace.WHITESPACE_CHARS) { + exports.IS_IDENTIFIER_CHAR[whitespaceChar] = 0; +} +exports.IS_IDENTIFIER_CHAR[0x2028] = 0; +exports.IS_IDENTIFIER_CHAR[0x2029] = 0; + + const IS_IDENTIFIER_START = exports.IS_IDENTIFIER_CHAR.slice(); exports.IS_IDENTIFIER_START = IS_IDENTIFIER_START; +for (let numChar = _charcodes.charCodes.digit0; numChar <= _charcodes.charCodes.digit9; numChar++) { + exports.IS_IDENTIFIER_START[numChar] = 0; +} diff --git a/node_modules/sucrase/dist/parser/util/identifier.mjs b/node_modules/sucrase/dist/parser/util/identifier.mjs new file mode 100644 index 00000000..33a6bb14 --- /dev/null +++ b/node_modules/sucrase/dist/parser/util/identifier.mjs @@ -0,0 +1,34 @@ +import {charCodes} from "./charcodes"; +import {WHITESPACE_CHARS} from "./whitespace"; + +function computeIsIdentifierChar(code) { + if (code < 48) return code === 36; + if (code < 58) return true; + if (code < 65) return false; + if (code < 91) return true; + if (code < 97) return code === 95; + if (code < 123) return true; + if (code < 128) return false; + throw new Error("Should not be called with non-ASCII char code."); +} + +export const IS_IDENTIFIER_CHAR = new Uint8Array(65536); +for (let i = 0; i < 128; i++) { + IS_IDENTIFIER_CHAR[i] = computeIsIdentifierChar(i) ? 1 : 0; +} +for (let i = 128; i < 65536; i++) { + IS_IDENTIFIER_CHAR[i] = 1; +} +// Aside from whitespace and newlines, all characters outside the ASCII space are either +// identifier characters or invalid. Since we're not performing code validation, we can just +// treat all invalid characters as identifier characters. 
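// Editor's note (illustrative sketch, not part of the patch): IS_IDENTIFIER_CHAR and
// IS_IDENTIFIER_START defined above are 65536-entry Uint8Array lookup tables indexed by char code;
// IS_IDENTIFIER_START is the same table with the decimal digits zeroed out, since an identifier may
// contain but not start with a digit. The require path assumes the CJS build file added in this diff.
const { IS_IDENTIFIER_CHAR, IS_IDENTIFIER_START } = require("sucrase/dist/parser/util/identifier");

const code = (ch) => ch.charCodeAt(0);
console.log(IS_IDENTIFIER_CHAR[code("x")]);  // 1
console.log(IS_IDENTIFIER_CHAR[code("9")]);  // 1 (digits are valid after the first character)
console.log(IS_IDENTIFIER_START[code("9")]); // 0 (but not as the first character)
console.log(IS_IDENTIFIER_CHAR[code(" ")]);  // 0 (whitespace is never an identifier character)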
+for (const whitespaceChar of WHITESPACE_CHARS) { + IS_IDENTIFIER_CHAR[whitespaceChar] = 0; +} +IS_IDENTIFIER_CHAR[0x2028] = 0; +IS_IDENTIFIER_CHAR[0x2029] = 0; + +export const IS_IDENTIFIER_START = IS_IDENTIFIER_CHAR.slice(); +for (let numChar = charCodes.digit0; numChar <= charCodes.digit9; numChar++) { + IS_IDENTIFIER_START[numChar] = 0; +} diff --git a/node_modules/sucrase/dist/parser/util/whitespace.d.ts b/node_modules/sucrase/dist/parser/util/whitespace.d.ts new file mode 100644 index 00000000..3547a592 --- /dev/null +++ b/node_modules/sucrase/dist/parser/util/whitespace.d.ts @@ -0,0 +1,2 @@ +export declare const WHITESPACE_CHARS: Array; +export declare const IS_WHITESPACE: Uint8Array; diff --git a/node_modules/sucrase/dist/parser/util/whitespace.js b/node_modules/sucrase/dist/parser/util/whitespace.js new file mode 100644 index 00000000..5ba4abc9 --- /dev/null +++ b/node_modules/sucrase/dist/parser/util/whitespace.js @@ -0,0 +1,31 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _charcodes = require('./charcodes'); + +// https://tc39.github.io/ecma262/#sec-white-space + const WHITESPACE_CHARS = [ + 0x0009, + 0x000b, + 0x000c, + _charcodes.charCodes.space, + _charcodes.charCodes.nonBreakingSpace, + _charcodes.charCodes.oghamSpaceMark, + 0x2000, // EN QUAD + 0x2001, // EM QUAD + 0x2002, // EN SPACE + 0x2003, // EM SPACE + 0x2004, // THREE-PER-EM SPACE + 0x2005, // FOUR-PER-EM SPACE + 0x2006, // SIX-PER-EM SPACE + 0x2007, // FIGURE SPACE + 0x2008, // PUNCTUATION SPACE + 0x2009, // THIN SPACE + 0x200a, // HAIR SPACE + 0x202f, // NARROW NO-BREAK SPACE + 0x205f, // MEDIUM MATHEMATICAL SPACE + 0x3000, // IDEOGRAPHIC SPACE + 0xfeff, // ZERO WIDTH NO-BREAK SPACE +]; exports.WHITESPACE_CHARS = WHITESPACE_CHARS; + + const IS_WHITESPACE = new Uint8Array(65536); exports.IS_WHITESPACE = IS_WHITESPACE; +for (const char of exports.WHITESPACE_CHARS) { + exports.IS_WHITESPACE[char] = 1; +} diff --git a/node_modules/sucrase/dist/parser/util/whitespace.mjs b/node_modules/sucrase/dist/parser/util/whitespace.mjs new file mode 100644 index 00000000..0840af8a --- /dev/null +++ b/node_modules/sucrase/dist/parser/util/whitespace.mjs @@ -0,0 +1,31 @@ +import {charCodes} from "./charcodes"; + +// https://tc39.github.io/ecma262/#sec-white-space +export const WHITESPACE_CHARS = [ + 0x0009, + 0x000b, + 0x000c, + charCodes.space, + charCodes.nonBreakingSpace, + charCodes.oghamSpaceMark, + 0x2000, // EN QUAD + 0x2001, // EM QUAD + 0x2002, // EN SPACE + 0x2003, // EM SPACE + 0x2004, // THREE-PER-EM SPACE + 0x2005, // FOUR-PER-EM SPACE + 0x2006, // SIX-PER-EM SPACE + 0x2007, // FIGURE SPACE + 0x2008, // PUNCTUATION SPACE + 0x2009, // THIN SPACE + 0x200a, // HAIR SPACE + 0x202f, // NARROW NO-BREAK SPACE + 0x205f, // MEDIUM MATHEMATICAL SPACE + 0x3000, // IDEOGRAPHIC SPACE + 0xfeff, // ZERO WIDTH NO-BREAK SPACE +]; + +export const IS_WHITESPACE = new Uint8Array(65536); +for (const char of WHITESPACE_CHARS) { + IS_WHITESPACE[char] = 1; +} diff --git a/node_modules/sucrase/dist/register.d.ts b/node_modules/sucrase/dist/register.d.ts new file mode 100644 index 00000000..a1e62fd0 --- /dev/null +++ b/node_modules/sucrase/dist/register.d.ts @@ -0,0 +1,9 @@ +import { Options } from "./index"; +export declare function addHook(extension: string, options: Options): void; +export declare function registerJS(): void; +export declare function registerJSX(): void; +export declare function registerTS(): void; +export declare function registerTSX(): void; +export declare function 
registerTSLegacyModuleInterop(): void; +export declare function registerTSXLegacyModuleInterop(): void; +export declare function registerAll(): void; diff --git a/node_modules/sucrase/dist/register.js b/node_modules/sucrase/dist/register.js new file mode 100644 index 00000000..aca618fc --- /dev/null +++ b/node_modules/sucrase/dist/register.js @@ -0,0 +1,57 @@ +"use strict"; function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } }Object.defineProperty(exports, "__esModule", {value: true});// @ts-ignore: no types available. +var _pirates = require('pirates'); var pirates = _interopRequireWildcard(_pirates); + +var _index = require('./index'); + + function addHook(extension, options) { + pirates.addHook( + (code, filePath) => { + const {code: transformedCode, sourceMap} = _index.transform.call(void 0, code, { + ...options, + sourceMapOptions: {compiledFilename: filePath}, + filePath, + }); + const mapBase64 = Buffer.from(JSON.stringify(sourceMap)).toString("base64"); + const suffix = `//# sourceMappingURL=data:application/json;charset=utf-8;base64,${mapBase64}`; + return `${transformedCode}\n${suffix}`; + }, + {exts: [extension]}, + ); +} exports.addHook = addHook; + + function registerJS() { + addHook(".js", {transforms: ["imports", "flow", "jsx"]}); +} exports.registerJS = registerJS; + + function registerJSX() { + addHook(".jsx", {transforms: ["imports", "flow", "jsx"]}); +} exports.registerJSX = registerJSX; + + function registerTS() { + addHook(".ts", {transforms: ["imports", "typescript"]}); +} exports.registerTS = registerTS; + + function registerTSX() { + addHook(".tsx", {transforms: ["imports", "typescript", "jsx"]}); +} exports.registerTSX = registerTSX; + + function registerTSLegacyModuleInterop() { + addHook(".ts", { + transforms: ["imports", "typescript"], + enableLegacyTypeScriptModuleInterop: true, + }); +} exports.registerTSLegacyModuleInterop = registerTSLegacyModuleInterop; + + function registerTSXLegacyModuleInterop() { + addHook(".tsx", { + transforms: ["imports", "typescript", "jsx"], + enableLegacyTypeScriptModuleInterop: true, + }); +} exports.registerTSXLegacyModuleInterop = registerTSXLegacyModuleInterop; + + function registerAll() { + registerJS(); + registerJSX(); + registerTS(); + registerTSX(); +} exports.registerAll = registerAll; diff --git a/node_modules/sucrase/dist/register.mjs b/node_modules/sucrase/dist/register.mjs new file mode 100644 index 00000000..47897517 --- /dev/null +++ b/node_modules/sucrase/dist/register.mjs @@ -0,0 +1,57 @@ +// @ts-ignore: no types available. 
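// Editor's note (illustrative sketch, not part of the patch): register.js above wires sucrase into
// Node's require() via pirates, so TypeScript/JSX sources are transpiled on the fly when loaded.
// A minimal bootstrap, assuming the dist path added in this diff; "./server.ts" is a hypothetical
// entry file name used only for illustration:
const { registerTS } = require("sucrase/dist/register");

registerTS();            // hooks the ".ts" extension with the "imports" + "typescript" transforms
require("./server.ts");  // subsequent .ts requires are now transpiled in-memory by sucrase
// registerAll() would hook .js, .jsx, .ts and .tsx in one call.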
+import * as pirates from "pirates"; + +import { transform} from "./index"; + +export function addHook(extension, options) { + pirates.addHook( + (code, filePath) => { + const {code: transformedCode, sourceMap} = transform(code, { + ...options, + sourceMapOptions: {compiledFilename: filePath}, + filePath, + }); + const mapBase64 = Buffer.from(JSON.stringify(sourceMap)).toString("base64"); + const suffix = `//# sourceMappingURL=data:application/json;charset=utf-8;base64,${mapBase64}`; + return `${transformedCode}\n${suffix}`; + }, + {exts: [extension]}, + ); +} + +export function registerJS() { + addHook(".js", {transforms: ["imports", "flow", "jsx"]}); +} + +export function registerJSX() { + addHook(".jsx", {transforms: ["imports", "flow", "jsx"]}); +} + +export function registerTS() { + addHook(".ts", {transforms: ["imports", "typescript"]}); +} + +export function registerTSX() { + addHook(".tsx", {transforms: ["imports", "typescript", "jsx"]}); +} + +export function registerTSLegacyModuleInterop() { + addHook(".ts", { + transforms: ["imports", "typescript"], + enableLegacyTypeScriptModuleInterop: true, + }); +} + +export function registerTSXLegacyModuleInterop() { + addHook(".tsx", { + transforms: ["imports", "typescript", "jsx"], + enableLegacyTypeScriptModuleInterop: true, + }); +} + +export function registerAll() { + registerJS(); + registerJSX(); + registerTS(); + registerTSX(); +} diff --git a/node_modules/sucrase/dist/transformers/CJSImportTransformer.d.ts b/node_modules/sucrase/dist/transformers/CJSImportTransformer.d.ts new file mode 100644 index 00000000..1d7a4e1f --- /dev/null +++ b/node_modules/sucrase/dist/transformers/CJSImportTransformer.d.ts @@ -0,0 +1,131 @@ +import CJSImportProcessor from "../CJSImportProcessor"; +import NameManager from "../NameManager"; +import TokenProcessor from "../TokenProcessor"; +import ReactHotLoaderTransformer from "./ReactHotLoaderTransformer"; +import RootTransformer from "./RootTransformer"; +import Transformer from "./Transformer"; +/** + * Class for editing import statements when we are transforming to commonjs. + */ +export default class CJSImportTransformer extends Transformer { + readonly rootTransformer: RootTransformer; + readonly tokens: TokenProcessor; + readonly importProcessor: CJSImportProcessor; + readonly nameManager: NameManager; + readonly reactHotLoaderTransformer: ReactHotLoaderTransformer | null; + readonly enableLegacyBabel5ModuleInterop: boolean; + readonly isTypeScriptTransformEnabled: boolean; + private hadExport; + private hadNamedExport; + private hadDefaultExport; + private declarationInfo; + constructor(rootTransformer: RootTransformer, tokens: TokenProcessor, importProcessor: CJSImportProcessor, nameManager: NameManager, reactHotLoaderTransformer: ReactHotLoaderTransformer | null, enableLegacyBabel5ModuleInterop: boolean, isTypeScriptTransformEnabled: boolean); + getPrefixCode(): string; + getSuffixCode(): string; + process(): boolean; + private processImportEquals; + /** + * Transform this: + * import foo, {bar} from 'baz'; + * into + * var _baz = require('baz'); var _baz2 = _interopRequireDefault(_baz); + * + * The import code was already generated in the import preprocessing step, so + * we just need to look it up. + */ + private processImport; + /** + * Erase this import, and return true if it was either of the form "import type" or contained only + * "type" named imports. Such imports should not even do a side-effect import. + * + * The position should end at the import string. 
+ */ + private removeImportAndDetectIfType; + private removeRemainingImport; + private processIdentifier; + processObjectShorthand(): boolean; + processExport(): boolean; + private processAssignment; + /** + * Process something like `a += 3`, where `a` might be an exported value. + */ + private processComplexAssignment; + /** + * Process something like `++a`, where `a` might be an exported value. + */ + private processPreIncDec; + /** + * Process something like `a++`, where `a` might be an exported value. + * This starts at the `a`, not at the `++`. + */ + private processPostIncDec; + private processExportDefault; + /** + * Transform a declaration like `export var`, `export let`, or `export const`. + */ + private processExportVar; + /** + * Determine if the export is of the form: + * export var/let/const [varName] = [expr]; + * In other words, determine if function name inference might apply. + */ + private isSimpleExportVar; + /** + * Transform an `export var` declaration initializing a single variable. + * + * For example, this: + * export const f = () => {}; + * becomes this: + * const f = () => {}; exports.f = f; + * + * The variable is unused (e.g. exports.f has the true value of the export). + * We need to produce an assignment of this form so that the function will + * have an inferred name of "f", which wouldn't happen in the more general + * case below. + */ + private processSimpleExportVar; + /** + * Transform normal declaration exports, including handling destructuring. + * For example, this: + * export const {x: [a = 2, b], c} = d; + * becomes this: + * ({x: [exports.a = 2, exports.b], c: exports.c} = d;) + */ + private processComplexExportVar; + /** + * Transform this: + * export function foo() {} + * into this: + * function foo() {} exports.foo = foo; + */ + private processExportFunction; + /** + * Skip past a function with a name and return that name. + */ + private processNamedFunction; + /** + * Transform this: + * export class A {} + * into this: + * class A {} exports.A = A; + */ + private processExportClass; + /** + * Transform this: + * export {a, b as c}; + * into this: + * exports.a = a; exports.c = b; + * + * OR + * + * Transform this: + * export {a, b as c} from './foo'; + * into the pre-generated Object.defineProperty code from the ImportProcessor. + * + * For the first case, if the TypeScript transform is enabled, we need to skip + * exports that are only defined as types. + */ + private processExportBindings; + private processExportStar; + private shouldElideExportedIdentifier; +} diff --git a/node_modules/sucrase/dist/transformers/CJSImportTransformer.js b/node_modules/sucrase/dist/transformers/CJSImportTransformer.js new file mode 100644 index 00000000..50020f1e --- /dev/null +++ b/node_modules/sucrase/dist/transformers/CJSImportTransformer.js @@ -0,0 +1,777 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true}); + +var _tokenizer = require('../parser/tokenizer'); +var _keywords = require('../parser/tokenizer/keywords'); +var _types = require('../parser/tokenizer/types'); + +var _elideImportEquals = require('../util/elideImportEquals'); var _elideImportEquals2 = _interopRequireDefault(_elideImportEquals); + + + +var _getDeclarationInfo = require('../util/getDeclarationInfo'); var _getDeclarationInfo2 = _interopRequireDefault(_getDeclarationInfo); +var _shouldElideDefaultExport = require('../util/shouldElideDefaultExport'); var _shouldElideDefaultExport2 = _interopRequireDefault(_shouldElideDefaultExport); + + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + +/** + * Class for editing import statements when we are transforming to commonjs. + */ + class CJSImportTransformer extends _Transformer2.default { + __init() {this.hadExport = false} + __init2() {this.hadNamedExport = false} + __init3() {this.hadDefaultExport = false} + + + constructor( + rootTransformer, + tokens, + importProcessor, + nameManager, + reactHotLoaderTransformer, + enableLegacyBabel5ModuleInterop, + isTypeScriptTransformEnabled, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.nameManager = nameManager;this.reactHotLoaderTransformer = reactHotLoaderTransformer;this.enableLegacyBabel5ModuleInterop = enableLegacyBabel5ModuleInterop;this.isTypeScriptTransformEnabled = isTypeScriptTransformEnabled;CJSImportTransformer.prototype.__init.call(this);CJSImportTransformer.prototype.__init2.call(this);CJSImportTransformer.prototype.__init3.call(this);; + this.declarationInfo = isTypeScriptTransformEnabled + ? _getDeclarationInfo2.default.call(void 0, tokens) + : _getDeclarationInfo.EMPTY_DECLARATION_INFO; + } + + getPrefixCode() { + let prefix = this.importProcessor.getPrefixCode(); + if (this.hadExport) { + prefix += 'Object.defineProperty(exports, "__esModule", {value: true});'; + } + return prefix; + } + + getSuffixCode() { + if (this.enableLegacyBabel5ModuleInterop && this.hadDefaultExport && !this.hadNamedExport) { + return "\nmodule.exports = exports.default;\n"; + } + return ""; + } + + process() { + // TypeScript `import foo = require('foo');` should always just be translated to plain require. + if (this.tokens.matches3(_types.TokenType._import, _types.TokenType.name, _types.TokenType.eq)) { + return this.processImportEquals(); + } + if (this.tokens.matches1(_types.TokenType._import)) { + this.processImport(); + return true; + } + if (this.tokens.matches2(_types.TokenType._export, _types.TokenType.eq)) { + this.tokens.replaceToken("module.exports"); + return true; + } + if (this.tokens.matches1(_types.TokenType._export) && !this.tokens.currentToken().isType) { + this.hadExport = true; + return this.processExport(); + } + if (this.tokens.matches2(_types.TokenType.name, _types.TokenType.postIncDec)) { + // Fall through to normal identifier matching if this doesn't apply. 
+ if (this.processPostIncDec()) { + return true; + } + } + if (this.tokens.matches1(_types.TokenType.name) || this.tokens.matches1(_types.TokenType.jsxName)) { + return this.processIdentifier(); + } + if (this.tokens.matches1(_types.TokenType.eq)) { + return this.processAssignment(); + } + if (this.tokens.matches1(_types.TokenType.assign)) { + return this.processComplexAssignment(); + } + if (this.tokens.matches1(_types.TokenType.preIncDec)) { + return this.processPreIncDec(); + } + return false; + } + + processImportEquals() { + const importName = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + if (this.importProcessor.isTypeName(importName)) { + // If this name is only used as a type, elide the whole import. + _elideImportEquals2.default.call(void 0, this.tokens); + } else { + // Otherwise, switch `import` to `const`. + this.tokens.replaceToken("const"); + } + return true; + } + + /** + * Transform this: + * import foo, {bar} from 'baz'; + * into + * var _baz = require('baz'); var _baz2 = _interopRequireDefault(_baz); + * + * The import code was already generated in the import preprocessing step, so + * we just need to look it up. + */ + processImport() { + if (this.tokens.matches2(_types.TokenType._import, _types.TokenType.parenL)) { + this.tokens.replaceToken("Promise.resolve().then(() => require"); + const contextId = this.tokens.currentToken().contextId; + if (contextId == null) { + throw new Error("Expected context ID on dynamic import invocation."); + } + this.tokens.copyToken(); + while (!this.tokens.matchesContextIdAndLabel(_types.TokenType.parenR, contextId)) { + this.rootTransformer.processToken(); + } + this.tokens.replaceToken("))"); + return; + } + + const wasOnlyTypes = this.removeImportAndDetectIfType(); + + if (wasOnlyTypes) { + this.tokens.removeToken(); + } else { + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + this.tokens.appendCode(this.importProcessor.claimImportCode(path)); + } + if (this.tokens.matches1(_types.TokenType.semi)) { + this.tokens.removeToken(); + } + } + + /** + * Erase this import, and return true if it was either of the form "import type" or contained only + * "type" named imports. Such imports should not even do a side-effect import. + * + * The position should end at the import string. + */ + removeImportAndDetectIfType() { + this.tokens.removeInitialToken(); + if ( + this.tokens.matchesContextual(_keywords.ContextualKeyword._type) && + !this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, _types.TokenType.comma) && + !this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, _keywords.ContextualKeyword._from) + ) { + // This is an "import type" statement, so exit early. + this.removeRemainingImport(); + return true; + } + + if (this.tokens.matches1(_types.TokenType.name) || this.tokens.matches1(_types.TokenType.star)) { + // We have a default import or namespace import, so there must be some + // non-type import. + this.removeRemainingImport(); + return false; + } + + if (this.tokens.matches1(_types.TokenType.string)) { + // This is a bare import, so we should proceed with the import. + return false; + } + + let foundNonType = false; + while (!this.tokens.matches1(_types.TokenType.string)) { + // Check if any named imports are of the form "foo" or "foo as bar", with + // no leading "type". 
+ if ((!foundNonType && this.tokens.matches1(_types.TokenType.braceL)) || this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + if ( + this.tokens.matches2(_types.TokenType.name, _types.TokenType.comma) || + this.tokens.matches2(_types.TokenType.name, _types.TokenType.braceR) || + this.tokens.matches4(_types.TokenType.name, _types.TokenType.name, _types.TokenType.name, _types.TokenType.comma) || + this.tokens.matches4(_types.TokenType.name, _types.TokenType.name, _types.TokenType.name, _types.TokenType.braceR) + ) { + foundNonType = true; + } + } + this.tokens.removeToken(); + } + return !foundNonType; + } + + removeRemainingImport() { + while (!this.tokens.matches1(_types.TokenType.string)) { + this.tokens.removeToken(); + } + } + + processIdentifier() { + const token = this.tokens.currentToken(); + if (token.shadowsGlobal) { + return false; + } + + if (token.identifierRole === _tokenizer.IdentifierRole.ObjectShorthand) { + return this.processObjectShorthand(); + } + + if (token.identifierRole !== _tokenizer.IdentifierRole.Access) { + return false; + } + const replacement = this.importProcessor.getIdentifierReplacement( + this.tokens.identifierNameForToken(token), + ); + if (!replacement) { + return false; + } + // Tolerate any number of closing parens while looking for an opening paren + // that indicates a function call. + let possibleOpenParenIndex = this.tokens.currentIndex() + 1; + while ( + possibleOpenParenIndex < this.tokens.tokens.length && + this.tokens.tokens[possibleOpenParenIndex].type === _types.TokenType.parenR + ) { + possibleOpenParenIndex++; + } + // Avoid treating imported functions as methods of their `exports` object + // by using `(0, f)` when the identifier is in a paren expression. Else + // use `Function.prototype.call` when the identifier is a guaranteed + // function call. When using `call`, pass undefined as the context. + if (this.tokens.tokens[possibleOpenParenIndex].type === _types.TokenType.parenL) { + if ( + this.tokens.tokenAtRelativeIndex(1).type === _types.TokenType.parenL && + this.tokens.tokenAtRelativeIndex(-1).type !== _types.TokenType._new + ) { + this.tokens.replaceToken(`${replacement}.call(void 0, `); + // Remove the old paren. + this.tokens.removeToken(); + // Balance out the new paren. + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.parenR); + } else { + // See here: http://2ality.com/2015/12/references.html + this.tokens.replaceToken(`(0, ${replacement})`); + } + } else { + this.tokens.replaceToken(replacement); + } + return true; + } + + processObjectShorthand() { + const identifier = this.tokens.identifierName(); + const replacement = this.importProcessor.getIdentifierReplacement(identifier); + if (!replacement) { + return false; + } + this.tokens.replaceToken(`${identifier}: ${replacement}`); + return true; + } + + processExport() { + if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType._enum) || + this.tokens.matches3(_types.TokenType._export, _types.TokenType._const, _types.TokenType._enum) + ) { + // Let the TypeScript transform handle it. 
+ return false; + } + if (this.tokens.matches2(_types.TokenType._export, _types.TokenType._default)) { + this.processExportDefault(); + this.hadDefaultExport = true; + return true; + } + this.hadNamedExport = true; + if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType._var) || + this.tokens.matches2(_types.TokenType._export, _types.TokenType._let) || + this.tokens.matches2(_types.TokenType._export, _types.TokenType._const) + ) { + this.processExportVar(); + return true; + } else if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType._function) || + // export async function + this.tokens.matches3(_types.TokenType._export, _types.TokenType.name, _types.TokenType._function) + ) { + this.processExportFunction(); + return true; + } else if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType._class) || + this.tokens.matches3(_types.TokenType._export, _types.TokenType._abstract, _types.TokenType._class) + ) { + this.processExportClass(); + return true; + } else if (this.tokens.matches2(_types.TokenType._export, _types.TokenType.braceL)) { + this.processExportBindings(); + return true; + } else if (this.tokens.matches2(_types.TokenType._export, _types.TokenType.star)) { + this.processExportStar(); + return true; + } else { + throw new Error("Unrecognized export syntax."); + } + } + + processAssignment() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index - 1]; + // If the LHS is a type identifier, this must be a declaration like `let a: b = c;`, + // with `b` as the identifier, so nothing needs to be done in that case. + if (identifierToken.isType || identifierToken.type !== _types.TokenType.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 2 && this.tokens.matches1AtIndex(index - 2, _types.TokenType.dot)) { + return false; + } + if (index >= 2 && [_types.TokenType._var, _types.TokenType._let, _types.TokenType._const].includes(this.tokens.tokens[index - 2].type)) { + // Declarations don't need an extra assignment. This doesn't avoid the + // assignment for comma-separated declarations, but it's still correct + // since the assignment is just redundant. + return false; + } + const assignmentSnippet = this.importProcessor.resolveExportBinding( + this.tokens.identifierNameForToken(identifierToken), + ); + if (!assignmentSnippet) { + return false; + } + this.tokens.copyToken(); + this.tokens.appendCode(` ${assignmentSnippet} =`); + return true; + } + + /** + * Process something like `a += 3`, where `a` might be an exported value. + */ + processComplexAssignment() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index - 1]; + if (identifierToken.type !== _types.TokenType.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 2 && this.tokens.matches1AtIndex(index - 2, _types.TokenType.dot)) { + return false; + } + const assignmentSnippet = this.importProcessor.resolveExportBinding( + this.tokens.identifierNameForToken(identifierToken), + ); + if (!assignmentSnippet) { + return false; + } + this.tokens.appendCode(` = ${assignmentSnippet}`); + this.tokens.copyToken(); + return true; + } + + /** + * Process something like `++a`, where `a` might be an exported value. 
+ */ + processPreIncDec() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index + 1]; + if (identifierToken.type !== _types.TokenType.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + // Ignore things like ++a.b and ++a[b] and ++a().b. + if ( + index + 2 < this.tokens.tokens.length && + (this.tokens.matches1AtIndex(index + 2, _types.TokenType.dot) || + this.tokens.matches1AtIndex(index + 2, _types.TokenType.bracketL) || + this.tokens.matches1AtIndex(index + 2, _types.TokenType.parenL)) + ) { + return false; + } + const identifierName = this.tokens.identifierNameForToken(identifierToken); + const assignmentSnippet = this.importProcessor.resolveExportBinding(identifierName); + if (!assignmentSnippet) { + return false; + } + this.tokens.appendCode(`${assignmentSnippet} = `); + this.tokens.copyToken(); + return true; + } + + /** + * Process something like `a++`, where `a` might be an exported value. + * This starts at the `a`, not at the `++`. + */ + processPostIncDec() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index]; + const operatorToken = this.tokens.tokens[index + 1]; + if (identifierToken.type !== _types.TokenType.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 1 && this.tokens.matches1AtIndex(index - 1, _types.TokenType.dot)) { + return false; + } + const identifierName = this.tokens.identifierNameForToken(identifierToken); + const assignmentSnippet = this.importProcessor.resolveExportBinding(identifierName); + if (!assignmentSnippet) { + return false; + } + const operatorCode = this.tokens.rawCodeForToken(operatorToken); + // We might also replace the identifier with something like exports.x, so + // do that replacement here as well. + const base = this.importProcessor.getIdentifierReplacement(identifierName) || identifierName; + if (operatorCode === "++") { + this.tokens.replaceToken(`(${base} = ${assignmentSnippet} = ${base} + 1, ${base} - 1)`); + } else if (operatorCode === "--") { + this.tokens.replaceToken(`(${base} = ${assignmentSnippet} = ${base} - 1, ${base} + 1)`); + } else { + throw new Error(`Unexpected operator: ${operatorCode}`); + } + this.tokens.removeToken(); + return true; + } + + processExportDefault() { + if ( + this.tokens.matches4(_types.TokenType._export, _types.TokenType._default, _types.TokenType._function, _types.TokenType.name) || + // export default async function + this.tokens.matches5(_types.TokenType._export, _types.TokenType._default, _types.TokenType.name, _types.TokenType._function, _types.TokenType.name) + ) { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + // Named function export case: change it to a top-level function + // declaration followed by exports statement. 
+ const name = this.processNamedFunction(); + this.tokens.appendCode(` exports.default = ${name};`); + } else if ( + this.tokens.matches4(_types.TokenType._export, _types.TokenType._default, _types.TokenType._class, _types.TokenType.name) || + this.tokens.matches5(_types.TokenType._export, _types.TokenType._default, _types.TokenType._abstract, _types.TokenType._class, _types.TokenType.name) + ) { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType._abstract)) { + this.tokens.removeToken(); + } + const name = this.rootTransformer.processNamedClass(); + this.tokens.appendCode(` exports.default = ${name};`); + } else if (this.tokens.matches3(_types.TokenType._export, _types.TokenType._default, _types.TokenType.at)) { + throw new Error("Export default statements with decorators are not yet supported."); + // After this point, this is a plain "export default E" statement. + } else if ( + _shouldElideDefaultExport2.default.call(void 0, this.isTypeScriptTransformEnabled, this.tokens, this.declarationInfo) + ) { + // If the exported value is just an identifier and should be elided by TypeScript + // rules, then remove it entirely. It will always have the form `export default e`, + // where `e` is an identifier. + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + } else if (this.reactHotLoaderTransformer) { + // We need to assign E to a variable. Change "export default E" to + // "let _default; exports.default = _default = E" + const defaultVarName = this.nameManager.claimFreeName("_default"); + this.tokens.replaceToken(`let ${defaultVarName}; exports.`); + this.tokens.copyToken(); + this.tokens.appendCode(` = ${defaultVarName} =`); + this.reactHotLoaderTransformer.setExtractedDefaultExportName(defaultVarName); + } else { + // Change "export default E" to "exports.default = E" + this.tokens.replaceToken("exports."); + this.tokens.copyToken(); + this.tokens.appendCode(" ="); + } + } + + /** + * Transform a declaration like `export var`, `export let`, or `export const`. + */ + processExportVar() { + if (this.isSimpleExportVar()) { + this.processSimpleExportVar(); + } else { + this.processComplexExportVar(); + } + } + + /** + * Determine if the export is of the form: + * export var/let/const [varName] = [expr]; + * In other words, determine if function name inference might apply. + */ + isSimpleExportVar() { + let tokenIndex = this.tokens.currentIndex(); + // export + tokenIndex++; + // var/let/const + tokenIndex++; + if (!this.tokens.matches1AtIndex(tokenIndex, _types.TokenType.name)) { + return false; + } + tokenIndex++; + while (tokenIndex < this.tokens.tokens.length && this.tokens.tokens[tokenIndex].isType) { + tokenIndex++; + } + if (!this.tokens.matches1AtIndex(tokenIndex, _types.TokenType.eq)) { + return false; + } + return true; + } + + /** + * Transform an `export var` declaration initializing a single variable. + * + * For example, this: + * export const f = () => {}; + * becomes this: + * const f = () => {}; exports.f = f; + * + * The variable is unused (e.g. exports.f has the true value of the export). + * We need to produce an assignment of this form so that the function will + * have an inferred name of "f", which wouldn't happen in the more general + * case below. 
+ */ + processSimpleExportVar() { + // export + this.tokens.removeInitialToken(); + // var/let/const + this.tokens.copyToken(); + const varName = this.tokens.identifierName(); + // x: number -> x + while (!this.tokens.matches1(_types.TokenType.eq)) { + this.rootTransformer.processToken(); + } + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode(`; exports.${varName} = ${varName}`); + } + + /** + * Transform normal declaration exports, including handling destructuring. + * For example, this: + * export const {x: [a = 2, b], c} = d; + * becomes this: + * ({x: [exports.a = 2, exports.b], c: exports.c} = d;) + */ + processComplexExportVar() { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + const needsParens = this.tokens.matches1(_types.TokenType.braceL); + if (needsParens) { + this.tokens.appendCode("("); + } + + let depth = 0; + while (true) { + if ( + this.tokens.matches1(_types.TokenType.braceL) || + this.tokens.matches1(_types.TokenType.dollarBraceL) || + this.tokens.matches1(_types.TokenType.bracketL) + ) { + depth++; + this.tokens.copyToken(); + } else if (this.tokens.matches1(_types.TokenType.braceR) || this.tokens.matches1(_types.TokenType.bracketR)) { + depth--; + this.tokens.copyToken(); + } else if ( + depth === 0 && + !this.tokens.matches1(_types.TokenType.name) && + !this.tokens.currentToken().isType + ) { + break; + } else if (this.tokens.matches1(_types.TokenType.eq)) { + // Default values might have assignments in the RHS that we want to ignore, so skip past + // them. + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + } else { + const token = this.tokens.currentToken(); + if (_tokenizer.isDeclaration.call(void 0, token)) { + const name = this.tokens.identifierName(); + let replacement = this.importProcessor.getIdentifierReplacement(name); + if (replacement === null) { + throw new Error(`Expected a replacement for ${name} in \`export var\` syntax.`); + } + if (_tokenizer.isObjectShorthandDeclaration.call(void 0, token)) { + replacement = `${name}: ${replacement}`; + } + this.tokens.replaceToken(replacement); + } else { + this.rootTransformer.processToken(); + } + } + } + + if (needsParens) { + // Seek to the end of the RHS. + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode(")"); + } + } + + /** + * Transform this: + * export function foo() {} + * into this: + * function foo() {} exports.foo = foo; + */ + processExportFunction() { + this.tokens.replaceToken(""); + const name = this.processNamedFunction(); + this.tokens.appendCode(` exports.${name} = ${name};`); + } + + /** + * Skip past a function with a name and return that name. 
+ */ + processNamedFunction() { + if (this.tokens.matches1(_types.TokenType._function)) { + this.tokens.copyToken(); + } else if (this.tokens.matches2(_types.TokenType.name, _types.TokenType._function)) { + if (!this.tokens.matchesContextual(_keywords.ContextualKeyword._async)) { + throw new Error("Expected async keyword in function export."); + } + this.tokens.copyToken(); + this.tokens.copyToken(); + } + if (this.tokens.matches1(_types.TokenType.star)) { + this.tokens.copyToken(); + } + if (!this.tokens.matches1(_types.TokenType.name)) { + throw new Error("Expected identifier for exported function name."); + } + const name = this.tokens.identifierName(); + this.tokens.copyToken(); + if (this.tokens.currentToken().isType) { + this.tokens.removeInitialToken(); + while (this.tokens.currentToken().isType) { + this.tokens.removeToken(); + } + } + this.tokens.copyExpectedToken(_types.TokenType.parenL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.parenR); + this.rootTransformer.processPossibleTypeRange(); + this.tokens.copyExpectedToken(_types.TokenType.braceL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.braceR); + return name; + } + + /** + * Transform this: + * export class A {} + * into this: + * class A {} exports.A = A; + */ + processExportClass() { + this.tokens.removeInitialToken(); + if (this.tokens.matches1(_types.TokenType._abstract)) { + this.tokens.removeToken(); + } + const name = this.rootTransformer.processNamedClass(); + this.tokens.appendCode(` exports.${name} = ${name};`); + } + + /** + * Transform this: + * export {a, b as c}; + * into this: + * exports.a = a; exports.c = b; + * + * OR + * + * Transform this: + * export {a, b as c} from './foo'; + * into the pre-generated Object.defineProperty code from the ImportProcessor. + * + * For the first case, if the TypeScript transform is enabled, we need to skip + * exports that are only defined as types. + */ + processExportBindings() { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + + const exportStatements = []; + while (true) { + if (this.tokens.matches1(_types.TokenType.braceR)) { + this.tokens.removeToken(); + break; + } + + const localName = this.tokens.identifierName(); + let exportedName; + this.tokens.removeToken(); + if (this.tokens.matchesContextual(_keywords.ContextualKeyword._as)) { + this.tokens.removeToken(); + exportedName = this.tokens.identifierName(); + this.tokens.removeToken(); + } else { + exportedName = localName; + } + if (!this.shouldElideExportedIdentifier(localName)) { + const newLocalName = this.importProcessor.getIdentifierReplacement(localName); + exportStatements.push(`exports.${exportedName} = ${newLocalName || localName};`); + } + + if (this.tokens.matches1(_types.TokenType.braceR)) { + this.tokens.removeToken(); + break; + } + if (this.tokens.matches2(_types.TokenType.comma, _types.TokenType.braceR)) { + this.tokens.removeToken(); + this.tokens.removeToken(); + break; + } else if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } else { + throw new Error(`Unexpected token: ${JSON.stringify(this.tokens.currentToken())}`); + } + } + + if (this.tokens.matchesContextual(_keywords.ContextualKeyword._from)) { + // This is an export...from, so throw away the normal named export code + // and use the Object.defineProperty code from ImportProcessor. 
+ this.tokens.removeToken(); + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + } else { + // This is a normal named export, so use that. + this.tokens.appendCode(exportStatements.join(" ")); + } + + if (this.tokens.matches1(_types.TokenType.semi)) { + this.tokens.removeToken(); + } + } + + processExportStar() { + this.tokens.removeInitialToken(); + while (!this.tokens.matches1(_types.TokenType.string)) { + this.tokens.removeToken(); + } + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + if (this.tokens.matches1(_types.TokenType.semi)) { + this.tokens.removeToken(); + } + } + + shouldElideExportedIdentifier(name) { + return this.isTypeScriptTransformEnabled && !this.declarationInfo.valueDeclarations.has(name); + } +} exports.default = CJSImportTransformer; diff --git a/node_modules/sucrase/dist/transformers/CJSImportTransformer.mjs b/node_modules/sucrase/dist/transformers/CJSImportTransformer.mjs new file mode 100644 index 00000000..e5af027d --- /dev/null +++ b/node_modules/sucrase/dist/transformers/CJSImportTransformer.mjs @@ -0,0 +1,777 @@ + + +import {IdentifierRole, isDeclaration, isObjectShorthandDeclaration} from "../parser/tokenizer"; +import {ContextualKeyword} from "../parser/tokenizer/keywords"; +import {TokenType as tt} from "../parser/tokenizer/types"; + +import elideImportEquals from "../util/elideImportEquals"; +import getDeclarationInfo, { + + EMPTY_DECLARATION_INFO, +} from "../util/getDeclarationInfo"; +import shouldElideDefaultExport from "../util/shouldElideDefaultExport"; + + +import Transformer from "./Transformer"; + +/** + * Class for editing import statements when we are transforming to commonjs. + */ +export default class CJSImportTransformer extends Transformer { + __init() {this.hadExport = false} + __init2() {this.hadNamedExport = false} + __init3() {this.hadDefaultExport = false} + + + constructor( + rootTransformer, + tokens, + importProcessor, + nameManager, + reactHotLoaderTransformer, + enableLegacyBabel5ModuleInterop, + isTypeScriptTransformEnabled, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.nameManager = nameManager;this.reactHotLoaderTransformer = reactHotLoaderTransformer;this.enableLegacyBabel5ModuleInterop = enableLegacyBabel5ModuleInterop;this.isTypeScriptTransformEnabled = isTypeScriptTransformEnabled;CJSImportTransformer.prototype.__init.call(this);CJSImportTransformer.prototype.__init2.call(this);CJSImportTransformer.prototype.__init3.call(this);; + this.declarationInfo = isTypeScriptTransformEnabled + ? getDeclarationInfo(tokens) + : EMPTY_DECLARATION_INFO; + } + + getPrefixCode() { + let prefix = this.importProcessor.getPrefixCode(); + if (this.hadExport) { + prefix += 'Object.defineProperty(exports, "__esModule", {value: true});'; + } + return prefix; + } + + getSuffixCode() { + if (this.enableLegacyBabel5ModuleInterop && this.hadDefaultExport && !this.hadNamedExport) { + return "\nmodule.exports = exports.default;\n"; + } + return ""; + } + + process() { + // TypeScript `import foo = require('foo');` should always just be translated to plain require. 
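// Editor's note (illustrative sketch, not part of the patch): CJSImportTransformer above rewrites
// ESM syntax to CommonJS. Running the published transform() entry point over a small module shows
// the general shape of its output; the transcript in the comments is approximate (helper
// definitions omitted, exact names and whitespace may differ):
const { transform } = require("sucrase");

const { code } = transform(
  `import foo, {bar} from "baz";
export const answer = 42;
export function greet() { return foo(bar); }`,
  { transforms: ["imports"] },
);
console.log(code);
// Roughly:
//   "use strict"; Object.defineProperty(exports, "__esModule", {value: true});
//   var _baz = require('baz'); var _baz2 = _interopRequireDefault(_baz);
//   const answer = 42; exports.answer = answer;
//   function greet() { return _baz2.default.call(void 0, _baz.bar); } exports.greet = greet;
// Similarly, `export {a, b as c};` becomes `exports.a = a; exports.c = b;`
// (see processExportBindings above).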
+ if (this.tokens.matches3(tt._import, tt.name, tt.eq)) { + return this.processImportEquals(); + } + if (this.tokens.matches1(tt._import)) { + this.processImport(); + return true; + } + if (this.tokens.matches2(tt._export, tt.eq)) { + this.tokens.replaceToken("module.exports"); + return true; + } + if (this.tokens.matches1(tt._export) && !this.tokens.currentToken().isType) { + this.hadExport = true; + return this.processExport(); + } + if (this.tokens.matches2(tt.name, tt.postIncDec)) { + // Fall through to normal identifier matching if this doesn't apply. + if (this.processPostIncDec()) { + return true; + } + } + if (this.tokens.matches1(tt.name) || this.tokens.matches1(tt.jsxName)) { + return this.processIdentifier(); + } + if (this.tokens.matches1(tt.eq)) { + return this.processAssignment(); + } + if (this.tokens.matches1(tt.assign)) { + return this.processComplexAssignment(); + } + if (this.tokens.matches1(tt.preIncDec)) { + return this.processPreIncDec(); + } + return false; + } + + processImportEquals() { + const importName = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + if (this.importProcessor.isTypeName(importName)) { + // If this name is only used as a type, elide the whole import. + elideImportEquals(this.tokens); + } else { + // Otherwise, switch `import` to `const`. + this.tokens.replaceToken("const"); + } + return true; + } + + /** + * Transform this: + * import foo, {bar} from 'baz'; + * into + * var _baz = require('baz'); var _baz2 = _interopRequireDefault(_baz); + * + * The import code was already generated in the import preprocessing step, so + * we just need to look it up. + */ + processImport() { + if (this.tokens.matches2(tt._import, tt.parenL)) { + this.tokens.replaceToken("Promise.resolve().then(() => require"); + const contextId = this.tokens.currentToken().contextId; + if (contextId == null) { + throw new Error("Expected context ID on dynamic import invocation."); + } + this.tokens.copyToken(); + while (!this.tokens.matchesContextIdAndLabel(tt.parenR, contextId)) { + this.rootTransformer.processToken(); + } + this.tokens.replaceToken("))"); + return; + } + + const wasOnlyTypes = this.removeImportAndDetectIfType(); + + if (wasOnlyTypes) { + this.tokens.removeToken(); + } else { + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + this.tokens.appendCode(this.importProcessor.claimImportCode(path)); + } + if (this.tokens.matches1(tt.semi)) { + this.tokens.removeToken(); + } + } + + /** + * Erase this import, and return true if it was either of the form "import type" or contained only + * "type" named imports. Such imports should not even do a side-effect import. + * + * The position should end at the import string. + */ + removeImportAndDetectIfType() { + this.tokens.removeInitialToken(); + if ( + this.tokens.matchesContextual(ContextualKeyword._type) && + !this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, tt.comma) && + !this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, ContextualKeyword._from) + ) { + // This is an "import type" statement, so exit early. + this.removeRemainingImport(); + return true; + } + + if (this.tokens.matches1(tt.name) || this.tokens.matches1(tt.star)) { + // We have a default import or namespace import, so there must be some + // non-type import. + this.removeRemainingImport(); + return false; + } + + if (this.tokens.matches1(tt.string)) { + // This is a bare import, so we should proceed with the import. 
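+      // e.g. (illustrative) a side-effect import like `import './polyfill';` has no bindings to
+      // elide, so it is kept and later rewritten to a plain require of the same path.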
+ return false; + } + + let foundNonType = false; + while (!this.tokens.matches1(tt.string)) { + // Check if any named imports are of the form "foo" or "foo as bar", with + // no leading "type". + if ((!foundNonType && this.tokens.matches1(tt.braceL)) || this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + if ( + this.tokens.matches2(tt.name, tt.comma) || + this.tokens.matches2(tt.name, tt.braceR) || + this.tokens.matches4(tt.name, tt.name, tt.name, tt.comma) || + this.tokens.matches4(tt.name, tt.name, tt.name, tt.braceR) + ) { + foundNonType = true; + } + } + this.tokens.removeToken(); + } + return !foundNonType; + } + + removeRemainingImport() { + while (!this.tokens.matches1(tt.string)) { + this.tokens.removeToken(); + } + } + + processIdentifier() { + const token = this.tokens.currentToken(); + if (token.shadowsGlobal) { + return false; + } + + if (token.identifierRole === IdentifierRole.ObjectShorthand) { + return this.processObjectShorthand(); + } + + if (token.identifierRole !== IdentifierRole.Access) { + return false; + } + const replacement = this.importProcessor.getIdentifierReplacement( + this.tokens.identifierNameForToken(token), + ); + if (!replacement) { + return false; + } + // Tolerate any number of closing parens while looking for an opening paren + // that indicates a function call. + let possibleOpenParenIndex = this.tokens.currentIndex() + 1; + while ( + possibleOpenParenIndex < this.tokens.tokens.length && + this.tokens.tokens[possibleOpenParenIndex].type === tt.parenR + ) { + possibleOpenParenIndex++; + } + // Avoid treating imported functions as methods of their `exports` object + // by using `(0, f)` when the identifier is in a paren expression. Else + // use `Function.prototype.call` when the identifier is a guaranteed + // function call. When using `call`, pass undefined as the context. + if (this.tokens.tokens[possibleOpenParenIndex].type === tt.parenL) { + if ( + this.tokens.tokenAtRelativeIndex(1).type === tt.parenL && + this.tokens.tokenAtRelativeIndex(-1).type !== tt._new + ) { + this.tokens.replaceToken(`${replacement}.call(void 0, `); + // Remove the old paren. + this.tokens.removeToken(); + // Balance out the new paren. + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.parenR); + } else { + // See here: http://2ality.com/2015/12/references.html + this.tokens.replaceToken(`(0, ${replacement})`); + } + } else { + this.tokens.replaceToken(replacement); + } + return true; + } + + processObjectShorthand() { + const identifier = this.tokens.identifierName(); + const replacement = this.importProcessor.getIdentifierReplacement(identifier); + if (!replacement) { + return false; + } + this.tokens.replaceToken(`${identifier}: ${replacement}`); + return true; + } + + processExport() { + if ( + this.tokens.matches2(tt._export, tt._enum) || + this.tokens.matches3(tt._export, tt._const, tt._enum) + ) { + // Let the TypeScript transform handle it. 
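+      // e.g. (illustrative) `export enum Color {Red}` is not rewritten here; returning false
+      // defers the whole statement to the TypeScript transformer.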
+ return false; + } + if (this.tokens.matches2(tt._export, tt._default)) { + this.processExportDefault(); + this.hadDefaultExport = true; + return true; + } + this.hadNamedExport = true; + if ( + this.tokens.matches2(tt._export, tt._var) || + this.tokens.matches2(tt._export, tt._let) || + this.tokens.matches2(tt._export, tt._const) + ) { + this.processExportVar(); + return true; + } else if ( + this.tokens.matches2(tt._export, tt._function) || + // export async function + this.tokens.matches3(tt._export, tt.name, tt._function) + ) { + this.processExportFunction(); + return true; + } else if ( + this.tokens.matches2(tt._export, tt._class) || + this.tokens.matches3(tt._export, tt._abstract, tt._class) + ) { + this.processExportClass(); + return true; + } else if (this.tokens.matches2(tt._export, tt.braceL)) { + this.processExportBindings(); + return true; + } else if (this.tokens.matches2(tt._export, tt.star)) { + this.processExportStar(); + return true; + } else { + throw new Error("Unrecognized export syntax."); + } + } + + processAssignment() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index - 1]; + // If the LHS is a type identifier, this must be a declaration like `let a: b = c;`, + // with `b` as the identifier, so nothing needs to be done in that case. + if (identifierToken.isType || identifierToken.type !== tt.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 2 && this.tokens.matches1AtIndex(index - 2, tt.dot)) { + return false; + } + if (index >= 2 && [tt._var, tt._let, tt._const].includes(this.tokens.tokens[index - 2].type)) { + // Declarations don't need an extra assignment. This doesn't avoid the + // assignment for comma-separated declarations, but it's still correct + // since the assignment is just redundant. + return false; + } + const assignmentSnippet = this.importProcessor.resolveExportBinding( + this.tokens.identifierNameForToken(identifierToken), + ); + if (!assignmentSnippet) { + return false; + } + this.tokens.copyToken(); + this.tokens.appendCode(` ${assignmentSnippet} =`); + return true; + } + + /** + * Process something like `a += 3`, where `a` might be an exported value. + */ + processComplexAssignment() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index - 1]; + if (identifierToken.type !== tt.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 2 && this.tokens.matches1AtIndex(index - 2, tt.dot)) { + return false; + } + const assignmentSnippet = this.importProcessor.resolveExportBinding( + this.tokens.identifierNameForToken(identifierToken), + ); + if (!assignmentSnippet) { + return false; + } + this.tokens.appendCode(` = ${assignmentSnippet}`); + this.tokens.copyToken(); + return true; + } + + /** + * Process something like `++a`, where `a` might be an exported value. + */ + processPreIncDec() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index + 1]; + if (identifierToken.type !== tt.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + // Ignore things like ++a.b and ++a[b] and ++a().b. 
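+    // Illustrative note: only a bare `++a` that targets an exported binding is rewritten below;
+    // member and call forms such as `++a.b` or `++a().b` fall through to default token handling.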
+ if ( + index + 2 < this.tokens.tokens.length && + (this.tokens.matches1AtIndex(index + 2, tt.dot) || + this.tokens.matches1AtIndex(index + 2, tt.bracketL) || + this.tokens.matches1AtIndex(index + 2, tt.parenL)) + ) { + return false; + } + const identifierName = this.tokens.identifierNameForToken(identifierToken); + const assignmentSnippet = this.importProcessor.resolveExportBinding(identifierName); + if (!assignmentSnippet) { + return false; + } + this.tokens.appendCode(`${assignmentSnippet} = `); + this.tokens.copyToken(); + return true; + } + + /** + * Process something like `a++`, where `a` might be an exported value. + * This starts at the `a`, not at the `++`. + */ + processPostIncDec() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index]; + const operatorToken = this.tokens.tokens[index + 1]; + if (identifierToken.type !== tt.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 1 && this.tokens.matches1AtIndex(index - 1, tt.dot)) { + return false; + } + const identifierName = this.tokens.identifierNameForToken(identifierToken); + const assignmentSnippet = this.importProcessor.resolveExportBinding(identifierName); + if (!assignmentSnippet) { + return false; + } + const operatorCode = this.tokens.rawCodeForToken(operatorToken); + // We might also replace the identifier with something like exports.x, so + // do that replacement here as well. + const base = this.importProcessor.getIdentifierReplacement(identifierName) || identifierName; + if (operatorCode === "++") { + this.tokens.replaceToken(`(${base} = ${assignmentSnippet} = ${base} + 1, ${base} - 1)`); + } else if (operatorCode === "--") { + this.tokens.replaceToken(`(${base} = ${assignmentSnippet} = ${base} - 1, ${base} + 1)`); + } else { + throw new Error(`Unexpected operator: ${operatorCode}`); + } + this.tokens.removeToken(); + return true; + } + + processExportDefault() { + if ( + this.tokens.matches4(tt._export, tt._default, tt._function, tt.name) || + // export default async function + this.tokens.matches5(tt._export, tt._default, tt.name, tt._function, tt.name) + ) { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + // Named function export case: change it to a top-level function + // declaration followed by exports statement. + const name = this.processNamedFunction(); + this.tokens.appendCode(` exports.default = ${name};`); + } else if ( + this.tokens.matches4(tt._export, tt._default, tt._class, tt.name) || + this.tokens.matches5(tt._export, tt._default, tt._abstract, tt._class, tt.name) + ) { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(tt._abstract)) { + this.tokens.removeToken(); + } + const name = this.rootTransformer.processNamedClass(); + this.tokens.appendCode(` exports.default = ${name};`); + } else if (this.tokens.matches3(tt._export, tt._default, tt.at)) { + throw new Error("Export default statements with decorators are not yet supported."); + // After this point, this is a plain "export default E" statement. + } else if ( + shouldElideDefaultExport(this.isTypeScriptTransformEnabled, this.tokens, this.declarationInfo) + ) { + // If the exported value is just an identifier and should be elided by TypeScript + // rules, then remove it entirely. It will always have the form `export default e`, + // where `e` is an identifier. 
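+      // Illustrative sketch: with the TypeScript transform enabled, `export default SomeType;`
+      // where `SomeType` is declared only as a type removes all three tokens and emits nothing.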
+ this.tokens.removeInitialToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + } else if (this.reactHotLoaderTransformer) { + // We need to assign E to a variable. Change "export default E" to + // "let _default; exports.default = _default = E" + const defaultVarName = this.nameManager.claimFreeName("_default"); + this.tokens.replaceToken(`let ${defaultVarName}; exports.`); + this.tokens.copyToken(); + this.tokens.appendCode(` = ${defaultVarName} =`); + this.reactHotLoaderTransformer.setExtractedDefaultExportName(defaultVarName); + } else { + // Change "export default E" to "exports.default = E" + this.tokens.replaceToken("exports."); + this.tokens.copyToken(); + this.tokens.appendCode(" ="); + } + } + + /** + * Transform a declaration like `export var`, `export let`, or `export const`. + */ + processExportVar() { + if (this.isSimpleExportVar()) { + this.processSimpleExportVar(); + } else { + this.processComplexExportVar(); + } + } + + /** + * Determine if the export is of the form: + * export var/let/const [varName] = [expr]; + * In other words, determine if function name inference might apply. + */ + isSimpleExportVar() { + let tokenIndex = this.tokens.currentIndex(); + // export + tokenIndex++; + // var/let/const + tokenIndex++; + if (!this.tokens.matches1AtIndex(tokenIndex, tt.name)) { + return false; + } + tokenIndex++; + while (tokenIndex < this.tokens.tokens.length && this.tokens.tokens[tokenIndex].isType) { + tokenIndex++; + } + if (!this.tokens.matches1AtIndex(tokenIndex, tt.eq)) { + return false; + } + return true; + } + + /** + * Transform an `export var` declaration initializing a single variable. + * + * For example, this: + * export const f = () => {}; + * becomes this: + * const f = () => {}; exports.f = f; + * + * The variable is unused (e.g. exports.f has the true value of the export). + * We need to produce an assignment of this form so that the function will + * have an inferred name of "f", which wouldn't happen in the more general + * case below. + */ + processSimpleExportVar() { + // export + this.tokens.removeInitialToken(); + // var/let/const + this.tokens.copyToken(); + const varName = this.tokens.identifierName(); + // x: number -> x + while (!this.tokens.matches1(tt.eq)) { + this.rootTransformer.processToken(); + } + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode(`; exports.${varName} = ${varName}`); + } + + /** + * Transform normal declaration exports, including handling destructuring. 
+ * For example, this: + * export const {x: [a = 2, b], c} = d; + * becomes this: + * ({x: [exports.a = 2, exports.b], c: exports.c} = d;) + */ + processComplexExportVar() { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + const needsParens = this.tokens.matches1(tt.braceL); + if (needsParens) { + this.tokens.appendCode("("); + } + + let depth = 0; + while (true) { + if ( + this.tokens.matches1(tt.braceL) || + this.tokens.matches1(tt.dollarBraceL) || + this.tokens.matches1(tt.bracketL) + ) { + depth++; + this.tokens.copyToken(); + } else if (this.tokens.matches1(tt.braceR) || this.tokens.matches1(tt.bracketR)) { + depth--; + this.tokens.copyToken(); + } else if ( + depth === 0 && + !this.tokens.matches1(tt.name) && + !this.tokens.currentToken().isType + ) { + break; + } else if (this.tokens.matches1(tt.eq)) { + // Default values might have assignments in the RHS that we want to ignore, so skip past + // them. + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + } else { + const token = this.tokens.currentToken(); + if (isDeclaration(token)) { + const name = this.tokens.identifierName(); + let replacement = this.importProcessor.getIdentifierReplacement(name); + if (replacement === null) { + throw new Error(`Expected a replacement for ${name} in \`export var\` syntax.`); + } + if (isObjectShorthandDeclaration(token)) { + replacement = `${name}: ${replacement}`; + } + this.tokens.replaceToken(replacement); + } else { + this.rootTransformer.processToken(); + } + } + } + + if (needsParens) { + // Seek to the end of the RHS. + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode(")"); + } + } + + /** + * Transform this: + * export function foo() {} + * into this: + * function foo() {} exports.foo = foo; + */ + processExportFunction() { + this.tokens.replaceToken(""); + const name = this.processNamedFunction(); + this.tokens.appendCode(` exports.${name} = ${name};`); + } + + /** + * Skip past a function with a name and return that name. 
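+   * e.g. (illustrative) given `export async function* gen() {}`, the whole function is copied
+   * through and `gen` is returned, so callers like processExportFunction can append
+   * `exports.gen = gen;`.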
+ */ + processNamedFunction() { + if (this.tokens.matches1(tt._function)) { + this.tokens.copyToken(); + } else if (this.tokens.matches2(tt.name, tt._function)) { + if (!this.tokens.matchesContextual(ContextualKeyword._async)) { + throw new Error("Expected async keyword in function export."); + } + this.tokens.copyToken(); + this.tokens.copyToken(); + } + if (this.tokens.matches1(tt.star)) { + this.tokens.copyToken(); + } + if (!this.tokens.matches1(tt.name)) { + throw new Error("Expected identifier for exported function name."); + } + const name = this.tokens.identifierName(); + this.tokens.copyToken(); + if (this.tokens.currentToken().isType) { + this.tokens.removeInitialToken(); + while (this.tokens.currentToken().isType) { + this.tokens.removeToken(); + } + } + this.tokens.copyExpectedToken(tt.parenL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.parenR); + this.rootTransformer.processPossibleTypeRange(); + this.tokens.copyExpectedToken(tt.braceL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.braceR); + return name; + } + + /** + * Transform this: + * export class A {} + * into this: + * class A {} exports.A = A; + */ + processExportClass() { + this.tokens.removeInitialToken(); + if (this.tokens.matches1(tt._abstract)) { + this.tokens.removeToken(); + } + const name = this.rootTransformer.processNamedClass(); + this.tokens.appendCode(` exports.${name} = ${name};`); + } + + /** + * Transform this: + * export {a, b as c}; + * into this: + * exports.a = a; exports.c = b; + * + * OR + * + * Transform this: + * export {a, b as c} from './foo'; + * into the pre-generated Object.defineProperty code from the ImportProcessor. + * + * For the first case, if the TypeScript transform is enabled, we need to skip + * exports that are only defined as types. + */ + processExportBindings() { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + + const exportStatements = []; + while (true) { + if (this.tokens.matches1(tt.braceR)) { + this.tokens.removeToken(); + break; + } + + const localName = this.tokens.identifierName(); + let exportedName; + this.tokens.removeToken(); + if (this.tokens.matchesContextual(ContextualKeyword._as)) { + this.tokens.removeToken(); + exportedName = this.tokens.identifierName(); + this.tokens.removeToken(); + } else { + exportedName = localName; + } + if (!this.shouldElideExportedIdentifier(localName)) { + const newLocalName = this.importProcessor.getIdentifierReplacement(localName); + exportStatements.push(`exports.${exportedName} = ${newLocalName || localName};`); + } + + if (this.tokens.matches1(tt.braceR)) { + this.tokens.removeToken(); + break; + } + if (this.tokens.matches2(tt.comma, tt.braceR)) { + this.tokens.removeToken(); + this.tokens.removeToken(); + break; + } else if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } else { + throw new Error(`Unexpected token: ${JSON.stringify(this.tokens.currentToken())}`); + } + } + + if (this.tokens.matchesContextual(ContextualKeyword._from)) { + // This is an export...from, so throw away the normal named export code + // and use the Object.defineProperty code from ImportProcessor. + this.tokens.removeToken(); + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + } else { + // This is a normal named export, so use that. 
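+      // e.g. (illustrative) `export {a, b as c};` appends the statements collected above:
+      // `exports.a = a; exports.c = b;`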
+ this.tokens.appendCode(exportStatements.join(" ")); + } + + if (this.tokens.matches1(tt.semi)) { + this.tokens.removeToken(); + } + } + + processExportStar() { + this.tokens.removeInitialToken(); + while (!this.tokens.matches1(tt.string)) { + this.tokens.removeToken(); + } + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + if (this.tokens.matches1(tt.semi)) { + this.tokens.removeToken(); + } + } + + shouldElideExportedIdentifier(name) { + return this.isTypeScriptTransformEnabled && !this.declarationInfo.valueDeclarations.has(name); + } +} diff --git a/node_modules/sucrase/dist/transformers/ESMImportTransformer.d.ts b/node_modules/sucrase/dist/transformers/ESMImportTransformer.d.ts new file mode 100644 index 00000000..6fea4f21 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ESMImportTransformer.d.ts @@ -0,0 +1,41 @@ +import { Options } from "../index"; +import NameManager from "../NameManager"; +import TokenProcessor from "../TokenProcessor"; +import ReactHotLoaderTransformer from "./ReactHotLoaderTransformer"; +import Transformer from "./Transformer"; +/** + * Class for editing import statements when we are keeping the code as ESM. We still need to remove + * type-only imports in TypeScript and Flow. + */ +export default class ESMImportTransformer extends Transformer { + readonly tokens: TokenProcessor; + readonly nameManager: NameManager; + readonly reactHotLoaderTransformer: ReactHotLoaderTransformer | null; + readonly isTypeScriptTransformEnabled: boolean; + private nonTypeIdentifiers; + private declarationInfo; + constructor(tokens: TokenProcessor, nameManager: NameManager, reactHotLoaderTransformer: ReactHotLoaderTransformer | null, isTypeScriptTransformEnabled: boolean, options: Options); + process(): boolean; + private processImportEquals; + private processImport; + /** + * Remove type bindings from this import, leaving the rest of the import intact. + * + * Return true if this import was ONLY types, and thus is eligible for removal. This will bail out + * of the replacement operation, so we can return early here. + */ + private removeImportTypeBindings; + private isTypeName; + private processExportDefault; + /** + * In TypeScript, we need to remove named exports that were never declared or only declared as a + * type. + */ + private processNamedExports; + /** + * ESM elides all imports with the rule that we only elide if we see that it's + * a type and never see it as a value. This is in contract to CJS, which + * elides imports that are completely unknown. + */ + private shouldElideExportedName; +} diff --git a/node_modules/sucrase/dist/transformers/ESMImportTransformer.js b/node_modules/sucrase/dist/transformers/ESMImportTransformer.js new file mode 100644 index 00000000..56660bcd --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ESMImportTransformer.js @@ -0,0 +1,310 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true}); + +var _keywords = require('../parser/tokenizer/keywords'); +var _types = require('../parser/tokenizer/types'); + +var _elideImportEquals = require('../util/elideImportEquals'); var _elideImportEquals2 = _interopRequireDefault(_elideImportEquals); + + + +var _getDeclarationInfo = require('../util/getDeclarationInfo'); var _getDeclarationInfo2 = _interopRequireDefault(_getDeclarationInfo); +var _getNonTypeIdentifiers = require('../util/getNonTypeIdentifiers'); +var _shouldElideDefaultExport = require('../util/shouldElideDefaultExport'); var _shouldElideDefaultExport2 = _interopRequireDefault(_shouldElideDefaultExport); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + +/** + * Class for editing import statements when we are keeping the code as ESM. We still need to remove + * type-only imports in TypeScript and Flow. + */ + class ESMImportTransformer extends _Transformer2.default { + + + + constructor( + tokens, + nameManager, + reactHotLoaderTransformer, + isTypeScriptTransformEnabled, + options, + ) { + super();this.tokens = tokens;this.nameManager = nameManager;this.reactHotLoaderTransformer = reactHotLoaderTransformer;this.isTypeScriptTransformEnabled = isTypeScriptTransformEnabled;; + this.nonTypeIdentifiers = isTypeScriptTransformEnabled + ? _getNonTypeIdentifiers.getNonTypeIdentifiers.call(void 0, tokens, options) + : new Set(); + this.declarationInfo = isTypeScriptTransformEnabled + ? _getDeclarationInfo2.default.call(void 0, tokens) + : _getDeclarationInfo.EMPTY_DECLARATION_INFO; + } + + process() { + // TypeScript `import foo = require('foo');` should always just be translated to plain require. + if (this.tokens.matches3(_types.TokenType._import, _types.TokenType.name, _types.TokenType.eq)) { + return this.processImportEquals(); + } + if (this.tokens.matches2(_types.TokenType._export, _types.TokenType.eq)) { + this.tokens.replaceToken("module.exports"); + return true; + } + if (this.tokens.matches1(_types.TokenType._import)) { + return this.processImport(); + } + if (this.tokens.matches2(_types.TokenType._export, _types.TokenType._default)) { + return this.processExportDefault(); + } + if (this.tokens.matches2(_types.TokenType._export, _types.TokenType.braceL)) { + return this.processNamedExports(); + } + return false; + } + + processImportEquals() { + const importName = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + if (this.isTypeName(importName)) { + // If this name is only used as a type, elide the whole import. + _elideImportEquals2.default.call(void 0, this.tokens); + } else { + // Otherwise, switch `import` to `const`. + this.tokens.replaceToken("const"); + } + return true; + } + + processImport() { + if (this.tokens.matches2(_types.TokenType._import, _types.TokenType.parenL)) { + // Dynamic imports don't need to be transformed. + return false; + } + + const snapshot = this.tokens.snapshot(); + const allImportsRemoved = this.removeImportTypeBindings(); + if (allImportsRemoved) { + this.tokens.restoreToSnapshot(snapshot); + while (!this.tokens.matches1(_types.TokenType.string)) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType.semi)) { + this.tokens.removeToken(); + } + } + return true; + } + + /** + * Remove type bindings from this import, leaving the rest of the import intact. 
+ * + * Return true if this import was ONLY types, and thus is eligible for removal. This will bail out + * of the replacement operation, so we can return early here. + */ + removeImportTypeBindings() { + this.tokens.copyExpectedToken(_types.TokenType._import); + if ( + this.tokens.matchesContextual(_keywords.ContextualKeyword._type) && + !this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, _types.TokenType.comma) && + !this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, _keywords.ContextualKeyword._from) + ) { + // This is an "import type" statement, so exit early. + return true; + } + + if (this.tokens.matches1(_types.TokenType.string)) { + // This is a bare import, so we should proceed with the import. + this.tokens.copyToken(); + return false; + } + + let foundNonTypeImport = false; + + if (this.tokens.matches1(_types.TokenType.name)) { + if (this.isTypeName(this.tokens.identifierName())) { + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } + } else { + foundNonTypeImport = true; + this.tokens.copyToken(); + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.copyToken(); + } + } + } + + if (this.tokens.matches1(_types.TokenType.star)) { + if (this.isTypeName(this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 2))) { + this.tokens.removeToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + } else { + foundNonTypeImport = true; + this.tokens.copyExpectedToken(_types.TokenType.star); + this.tokens.copyExpectedToken(_types.TokenType.name); + this.tokens.copyExpectedToken(_types.TokenType.name); + } + } else if (this.tokens.matches1(_types.TokenType.braceL)) { + this.tokens.copyToken(); + while (!this.tokens.matches1(_types.TokenType.braceR)) { + if ( + this.tokens.matches3(_types.TokenType.name, _types.TokenType.name, _types.TokenType.comma) || + this.tokens.matches3(_types.TokenType.name, _types.TokenType.name, _types.TokenType.braceR) + ) { + // type foo + this.tokens.removeToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } + } else if ( + this.tokens.matches5(_types.TokenType.name, _types.TokenType.name, _types.TokenType.name, _types.TokenType.name, _types.TokenType.comma) || + this.tokens.matches5(_types.TokenType.name, _types.TokenType.name, _types.TokenType.name, _types.TokenType.name, _types.TokenType.braceR) + ) { + // type foo as bar + this.tokens.removeToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } + } else if ( + this.tokens.matches2(_types.TokenType.name, _types.TokenType.comma) || + this.tokens.matches2(_types.TokenType.name, _types.TokenType.braceR) + ) { + // foo + if (this.isTypeName(this.tokens.identifierName())) { + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } + } else { + foundNonTypeImport = true; + this.tokens.copyToken(); + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.copyToken(); + } + } + } else if ( + this.tokens.matches4(_types.TokenType.name, _types.TokenType.name, _types.TokenType.name, _types.TokenType.comma) || + this.tokens.matches4(_types.TokenType.name, _types.TokenType.name, _types.TokenType.name, _types.TokenType.braceR) + ) { + // foo as bar + if (this.isTypeName(this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 2))) { 
+ this.tokens.removeToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } + } else { + foundNonTypeImport = true; + this.tokens.copyToken(); + this.tokens.copyToken(); + this.tokens.copyToken(); + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.copyToken(); + } + } + } else { + throw new Error("Unexpected import form."); + } + } + this.tokens.copyExpectedToken(_types.TokenType.braceR); + } + + return !foundNonTypeImport; + } + + isTypeName(name) { + return this.isTypeScriptTransformEnabled && !this.nonTypeIdentifiers.has(name); + } + + processExportDefault() { + if ( + _shouldElideDefaultExport2.default.call(void 0, this.isTypeScriptTransformEnabled, this.tokens, this.declarationInfo) + ) { + // If the exported value is just an identifier and should be elided by TypeScript + // rules, then remove it entirely. It will always have the form `export default e`, + // where `e` is an identifier. + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + return true; + } + + const alreadyHasName = + this.tokens.matches4(_types.TokenType._export, _types.TokenType._default, _types.TokenType._function, _types.TokenType.name) || + // export default async function + this.tokens.matches5(_types.TokenType._export, _types.TokenType._default, _types.TokenType.name, _types.TokenType._function, _types.TokenType.name) || + this.tokens.matches4(_types.TokenType._export, _types.TokenType._default, _types.TokenType._class, _types.TokenType.name) || + this.tokens.matches5(_types.TokenType._export, _types.TokenType._default, _types.TokenType._abstract, _types.TokenType._class, _types.TokenType.name); + + if (!alreadyHasName && this.reactHotLoaderTransformer) { + // This is a plain "export default E" statement and we need to assign E to a variable. + // Change "export default E" to "let _default; export default _default = E" + const defaultVarName = this.nameManager.claimFreeName("_default"); + this.tokens.replaceToken(`let ${defaultVarName}; export`); + this.tokens.copyToken(); + this.tokens.appendCode(` ${defaultVarName} =`); + this.reactHotLoaderTransformer.setExtractedDefaultExportName(defaultVarName); + return true; + } + return false; + } + + /** + * In TypeScript, we need to remove named exports that were never declared or only declared as a + * type. 
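+   * e.g. (illustrative) in `export {SomeType, someValue};`, a `SomeType` declared only as a type
+   * is dropped from the braces while `someValue` is kept.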
+ */ + processNamedExports() { + if (!this.isTypeScriptTransformEnabled) { + return false; + } + this.tokens.copyExpectedToken(_types.TokenType._export); + this.tokens.copyExpectedToken(_types.TokenType.braceL); + + while (!this.tokens.matches1(_types.TokenType.braceR)) { + if (!this.tokens.matches1(_types.TokenType.name)) { + throw new Error("Expected identifier at the start of named export."); + } + if (this.shouldElideExportedName(this.tokens.identifierName())) { + while ( + !this.tokens.matches1(_types.TokenType.comma) && + !this.tokens.matches1(_types.TokenType.braceR) && + !this.tokens.isAtEnd() + ) { + this.tokens.removeToken(); + } + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } + } else { + while ( + !this.tokens.matches1(_types.TokenType.comma) && + !this.tokens.matches1(_types.TokenType.braceR) && + !this.tokens.isAtEnd() + ) { + this.tokens.copyToken(); + } + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.copyToken(); + } + } + } + this.tokens.copyExpectedToken(_types.TokenType.braceR); + return true; + } + + /** + * ESM elides all imports with the rule that we only elide if we see that it's + * a type and never see it as a value. This is in contract to CJS, which + * elides imports that are completely unknown. + */ + shouldElideExportedName(name) { + return ( + this.isTypeScriptTransformEnabled && + this.declarationInfo.typeDeclarations.has(name) && + !this.declarationInfo.valueDeclarations.has(name) + ); + } +} exports.default = ESMImportTransformer; diff --git a/node_modules/sucrase/dist/transformers/ESMImportTransformer.mjs b/node_modules/sucrase/dist/transformers/ESMImportTransformer.mjs new file mode 100644 index 00000000..7edd55e0 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ESMImportTransformer.mjs @@ -0,0 +1,310 @@ + + +import {ContextualKeyword} from "../parser/tokenizer/keywords"; +import {TokenType as tt} from "../parser/tokenizer/types"; + +import elideImportEquals from "../util/elideImportEquals"; +import getDeclarationInfo, { + + EMPTY_DECLARATION_INFO, +} from "../util/getDeclarationInfo"; +import {getNonTypeIdentifiers} from "../util/getNonTypeIdentifiers"; +import shouldElideDefaultExport from "../util/shouldElideDefaultExport"; + +import Transformer from "./Transformer"; + +/** + * Class for editing import statements when we are keeping the code as ESM. We still need to remove + * type-only imports in TypeScript and Flow. + */ +export default class ESMImportTransformer extends Transformer { + + + + constructor( + tokens, + nameManager, + reactHotLoaderTransformer, + isTypeScriptTransformEnabled, + options, + ) { + super();this.tokens = tokens;this.nameManager = nameManager;this.reactHotLoaderTransformer = reactHotLoaderTransformer;this.isTypeScriptTransformEnabled = isTypeScriptTransformEnabled;; + this.nonTypeIdentifiers = isTypeScriptTransformEnabled + ? getNonTypeIdentifiers(tokens, options) + : new Set(); + this.declarationInfo = isTypeScriptTransformEnabled + ? getDeclarationInfo(tokens) + : EMPTY_DECLARATION_INFO; + } + + process() { + // TypeScript `import foo = require('foo');` should always just be translated to plain require. 
+ if (this.tokens.matches3(tt._import, tt.name, tt.eq)) { + return this.processImportEquals(); + } + if (this.tokens.matches2(tt._export, tt.eq)) { + this.tokens.replaceToken("module.exports"); + return true; + } + if (this.tokens.matches1(tt._import)) { + return this.processImport(); + } + if (this.tokens.matches2(tt._export, tt._default)) { + return this.processExportDefault(); + } + if (this.tokens.matches2(tt._export, tt.braceL)) { + return this.processNamedExports(); + } + return false; + } + + processImportEquals() { + const importName = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + if (this.isTypeName(importName)) { + // If this name is only used as a type, elide the whole import. + elideImportEquals(this.tokens); + } else { + // Otherwise, switch `import` to `const`. + this.tokens.replaceToken("const"); + } + return true; + } + + processImport() { + if (this.tokens.matches2(tt._import, tt.parenL)) { + // Dynamic imports don't need to be transformed. + return false; + } + + const snapshot = this.tokens.snapshot(); + const allImportsRemoved = this.removeImportTypeBindings(); + if (allImportsRemoved) { + this.tokens.restoreToSnapshot(snapshot); + while (!this.tokens.matches1(tt.string)) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + if (this.tokens.matches1(tt.semi)) { + this.tokens.removeToken(); + } + } + return true; + } + + /** + * Remove type bindings from this import, leaving the rest of the import intact. + * + * Return true if this import was ONLY types, and thus is eligible for removal. This will bail out + * of the replacement operation, so we can return early here. + */ + removeImportTypeBindings() { + this.tokens.copyExpectedToken(tt._import); + if ( + this.tokens.matchesContextual(ContextualKeyword._type) && + !this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, tt.comma) && + !this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, ContextualKeyword._from) + ) { + // This is an "import type" statement, so exit early. + return true; + } + + if (this.tokens.matches1(tt.string)) { + // This is a bare import, so we should proceed with the import. 
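+      // e.g. (illustrative) `import './styles.css';` is copied through unchanged, since ESM
+      // output keeps side-effect imports as-is.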
+ this.tokens.copyToken(); + return false; + } + + let foundNonTypeImport = false; + + if (this.tokens.matches1(tt.name)) { + if (this.isTypeName(this.tokens.identifierName())) { + this.tokens.removeToken(); + if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } + } else { + foundNonTypeImport = true; + this.tokens.copyToken(); + if (this.tokens.matches1(tt.comma)) { + this.tokens.copyToken(); + } + } + } + + if (this.tokens.matches1(tt.star)) { + if (this.isTypeName(this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 2))) { + this.tokens.removeToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + } else { + foundNonTypeImport = true; + this.tokens.copyExpectedToken(tt.star); + this.tokens.copyExpectedToken(tt.name); + this.tokens.copyExpectedToken(tt.name); + } + } else if (this.tokens.matches1(tt.braceL)) { + this.tokens.copyToken(); + while (!this.tokens.matches1(tt.braceR)) { + if ( + this.tokens.matches3(tt.name, tt.name, tt.comma) || + this.tokens.matches3(tt.name, tt.name, tt.braceR) + ) { + // type foo + this.tokens.removeToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } + } else if ( + this.tokens.matches5(tt.name, tt.name, tt.name, tt.name, tt.comma) || + this.tokens.matches5(tt.name, tt.name, tt.name, tt.name, tt.braceR) + ) { + // type foo as bar + this.tokens.removeToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } + } else if ( + this.tokens.matches2(tt.name, tt.comma) || + this.tokens.matches2(tt.name, tt.braceR) + ) { + // foo + if (this.isTypeName(this.tokens.identifierName())) { + this.tokens.removeToken(); + if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } + } else { + foundNonTypeImport = true; + this.tokens.copyToken(); + if (this.tokens.matches1(tt.comma)) { + this.tokens.copyToken(); + } + } + } else if ( + this.tokens.matches4(tt.name, tt.name, tt.name, tt.comma) || + this.tokens.matches4(tt.name, tt.name, tt.name, tt.braceR) + ) { + // foo as bar + if (this.isTypeName(this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 2))) { + this.tokens.removeToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } + } else { + foundNonTypeImport = true; + this.tokens.copyToken(); + this.tokens.copyToken(); + this.tokens.copyToken(); + if (this.tokens.matches1(tt.comma)) { + this.tokens.copyToken(); + } + } + } else { + throw new Error("Unexpected import form."); + } + } + this.tokens.copyExpectedToken(tt.braceR); + } + + return !foundNonTypeImport; + } + + isTypeName(name) { + return this.isTypeScriptTransformEnabled && !this.nonTypeIdentifiers.has(name); + } + + processExportDefault() { + if ( + shouldElideDefaultExport(this.isTypeScriptTransformEnabled, this.tokens, this.declarationInfo) + ) { + // If the exported value is just an identifier and should be elided by TypeScript + // rules, then remove it entirely. It will always have the form `export default e`, + // where `e` is an identifier. 
+ this.tokens.removeInitialToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + return true; + } + + const alreadyHasName = + this.tokens.matches4(tt._export, tt._default, tt._function, tt.name) || + // export default async function + this.tokens.matches5(tt._export, tt._default, tt.name, tt._function, tt.name) || + this.tokens.matches4(tt._export, tt._default, tt._class, tt.name) || + this.tokens.matches5(tt._export, tt._default, tt._abstract, tt._class, tt.name); + + if (!alreadyHasName && this.reactHotLoaderTransformer) { + // This is a plain "export default E" statement and we need to assign E to a variable. + // Change "export default E" to "let _default; export default _default = E" + const defaultVarName = this.nameManager.claimFreeName("_default"); + this.tokens.replaceToken(`let ${defaultVarName}; export`); + this.tokens.copyToken(); + this.tokens.appendCode(` ${defaultVarName} =`); + this.reactHotLoaderTransformer.setExtractedDefaultExportName(defaultVarName); + return true; + } + return false; + } + + /** + * In TypeScript, we need to remove named exports that were never declared or only declared as a + * type. + */ + processNamedExports() { + if (!this.isTypeScriptTransformEnabled) { + return false; + } + this.tokens.copyExpectedToken(tt._export); + this.tokens.copyExpectedToken(tt.braceL); + + while (!this.tokens.matches1(tt.braceR)) { + if (!this.tokens.matches1(tt.name)) { + throw new Error("Expected identifier at the start of named export."); + } + if (this.shouldElideExportedName(this.tokens.identifierName())) { + while ( + !this.tokens.matches1(tt.comma) && + !this.tokens.matches1(tt.braceR) && + !this.tokens.isAtEnd() + ) { + this.tokens.removeToken(); + } + if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } + } else { + while ( + !this.tokens.matches1(tt.comma) && + !this.tokens.matches1(tt.braceR) && + !this.tokens.isAtEnd() + ) { + this.tokens.copyToken(); + } + if (this.tokens.matches1(tt.comma)) { + this.tokens.copyToken(); + } + } + } + this.tokens.copyExpectedToken(tt.braceR); + return true; + } + + /** + * ESM elides all imports with the rule that we only elide if we see that it's + * a type and never see it as a value. This is in contract to CJS, which + * elides imports that are completely unknown. + */ + shouldElideExportedName(name) { + return ( + this.isTypeScriptTransformEnabled && + this.declarationInfo.typeDeclarations.has(name) && + !this.declarationInfo.valueDeclarations.has(name) + ); + } +} diff --git a/node_modules/sucrase/dist/transformers/FlowTransformer.d.ts b/node_modules/sucrase/dist/transformers/FlowTransformer.d.ts new file mode 100644 index 00000000..49494d90 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/FlowTransformer.d.ts @@ -0,0 +1,9 @@ +import TokenProcessor from "../TokenProcessor"; +import RootTransformer from "./RootTransformer"; +import Transformer from "./Transformer"; +export default class FlowTransformer extends Transformer { + readonly rootTransformer: RootTransformer; + readonly tokens: TokenProcessor; + constructor(rootTransformer: RootTransformer, tokens: TokenProcessor); + process(): boolean; +} diff --git a/node_modules/sucrase/dist/transformers/FlowTransformer.js b/node_modules/sucrase/dist/transformers/FlowTransformer.js new file mode 100644 index 00000000..27221420 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/FlowTransformer.js @@ -0,0 +1,17 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true}); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + + class FlowTransformer extends _Transformer2.default { + constructor( rootTransformer, tokens) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;; + } + + process() { + return ( + this.rootTransformer.processPossibleArrowParamEnd() || + this.rootTransformer.processPossibleAsyncArrowWithTypeParams() || + this.rootTransformer.processPossibleTypeRange() + ); + } +} exports.default = FlowTransformer; diff --git a/node_modules/sucrase/dist/transformers/FlowTransformer.mjs b/node_modules/sucrase/dist/transformers/FlowTransformer.mjs new file mode 100644 index 00000000..97e2e2be --- /dev/null +++ b/node_modules/sucrase/dist/transformers/FlowTransformer.mjs @@ -0,0 +1,17 @@ + + +import Transformer from "./Transformer"; + +export default class FlowTransformer extends Transformer { + constructor( rootTransformer, tokens) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;; + } + + process() { + return ( + this.rootTransformer.processPossibleArrowParamEnd() || + this.rootTransformer.processPossibleAsyncArrowWithTypeParams() || + this.rootTransformer.processPossibleTypeRange() + ); + } +} diff --git a/node_modules/sucrase/dist/transformers/JSXTransformer.d.ts b/node_modules/sucrase/dist/transformers/JSXTransformer.d.ts new file mode 100644 index 00000000..fd7c6c45 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/JSXTransformer.d.ts @@ -0,0 +1,44 @@ +import CJSImportProcessor from "../CJSImportProcessor"; +import { Options } from "../index"; +import NameManager from "../NameManager"; +import TokenProcessor from "../TokenProcessor"; +import { JSXPragmaInfo } from "../util/getJSXPragmaInfo"; +import RootTransformer from "./RootTransformer"; +import Transformer from "./Transformer"; +export default class JSXTransformer extends Transformer { + readonly rootTransformer: RootTransformer; + readonly tokens: TokenProcessor; + readonly importProcessor: CJSImportProcessor | null; + readonly nameManager: NameManager; + readonly options: Options; + lastLineNumber: number; + lastIndex: number; + filenameVarName: string | null; + readonly jsxPragmaInfo: JSXPragmaInfo; + constructor(rootTransformer: RootTransformer, tokens: TokenProcessor, importProcessor: CJSImportProcessor | null, nameManager: NameManager, options: Options); + process(): boolean; + getPrefixCode(): string; + /** + * Lazily calculate line numbers to avoid unneeded work. We assume this is always called in + * increasing order by index. + */ + getLineNumberForIndex(index: number): number; + getFilenameVarName(): string; + processProps(firstTokenStart: number): void; + processPropKeyName(): void; + processStringPropValue(): void; + /** + * Process the first part of a tag, before any props. + */ + processTagIntro(): void; + processChildren(): void; + processChildTextElement(): void; + processJSXTag(): void; +} +/** + * Spec for identifiers: https://tc39.github.io/ecma262/#prod-IdentifierStart. + * + * Really only treat anything starting with a-z as tag names. 
`_`, `$`, `é` + * should be treated as copmonent names + */ +export declare function startsWithLowerCase(s: string): boolean; diff --git a/node_modules/sucrase/dist/transformers/JSXTransformer.js b/node_modules/sucrase/dist/transformers/JSXTransformer.js new file mode 100644 index 00000000..07a8442d --- /dev/null +++ b/node_modules/sucrase/dist/transformers/JSXTransformer.js @@ -0,0 +1,398 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true}); + + +var _xhtml = require('../parser/plugins/jsx/xhtml'); var _xhtml2 = _interopRequireDefault(_xhtml); +var _types = require('../parser/tokenizer/types'); +var _charcodes = require('../parser/util/charcodes'); + +var _getJSXPragmaInfo = require('../util/getJSXPragmaInfo'); var _getJSXPragmaInfo2 = _interopRequireDefault(_getJSXPragmaInfo); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + +const HEX_NUMBER = /^[\da-fA-F]+$/; +const DECIMAL_NUMBER = /^\d+$/; + + class JSXTransformer extends _Transformer2.default { + __init() {this.lastLineNumber = 1} + __init2() {this.lastIndex = 0} + __init3() {this.filenameVarName = null} + + + constructor( + rootTransformer, + tokens, + importProcessor, + nameManager, + options, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.nameManager = nameManager;this.options = options;JSXTransformer.prototype.__init.call(this);JSXTransformer.prototype.__init2.call(this);JSXTransformer.prototype.__init3.call(this);; + this.jsxPragmaInfo = _getJSXPragmaInfo2.default.call(void 0, options); + } + + process() { + if (this.tokens.matches1(_types.TokenType.jsxTagStart)) { + this.processJSXTag(); + return true; + } + return false; + } + + getPrefixCode() { + if (this.filenameVarName) { + return `const ${this.filenameVarName} = ${JSON.stringify(this.options.filePath || "")};`; + } else { + return ""; + } + } + + /** + * Lazily calculate line numbers to avoid unneeded work. We assume this is always called in + * increasing order by index. + */ + getLineNumberForIndex(index) { + const code = this.tokens.code; + while (this.lastIndex < index && this.lastIndex < code.length) { + if (code[this.lastIndex] === "\n") { + this.lastLineNumber++; + } + this.lastIndex++; + } + return this.lastLineNumber; + } + + getFilenameVarName() { + if (!this.filenameVarName) { + this.filenameVarName = this.nameManager.claimFreeName("_jsxFileName"); + } + return this.filenameVarName; + } + + processProps(firstTokenStart) { + const lineNumber = this.getLineNumberForIndex(firstTokenStart); + const devProps = this.options.production + ? 
"" + : `__self: this, __source: {fileName: ${this.getFilenameVarName()}, lineNumber: ${lineNumber}}`; + if (!this.tokens.matches1(_types.TokenType.jsxName) && !this.tokens.matches1(_types.TokenType.braceL)) { + if (devProps) { + this.tokens.appendCode(`, {${devProps}}`); + } else { + this.tokens.appendCode(`, null`); + } + return; + } + this.tokens.appendCode(`, {`); + while (true) { + if (this.tokens.matches2(_types.TokenType.jsxName, _types.TokenType.eq)) { + this.processPropKeyName(); + this.tokens.replaceToken(": "); + if (this.tokens.matches1(_types.TokenType.braceL)) { + this.tokens.replaceToken(""); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + } else if (this.tokens.matches1(_types.TokenType.jsxTagStart)) { + this.processJSXTag(); + } else { + this.processStringPropValue(); + } + } else if (this.tokens.matches1(_types.TokenType.jsxName)) { + this.processPropKeyName(); + this.tokens.appendCode(": true"); + } else if (this.tokens.matches1(_types.TokenType.braceL)) { + this.tokens.replaceToken(""); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + } else { + break; + } + this.tokens.appendCode(","); + } + if (devProps) { + this.tokens.appendCode(` ${devProps}}`); + } else { + this.tokens.appendCode("}"); + } + } + + processPropKeyName() { + const keyName = this.tokens.identifierName(); + if (keyName.includes("-")) { + this.tokens.replaceToken(`'${keyName}'`); + } else { + this.tokens.copyToken(); + } + } + + processStringPropValue() { + const token = this.tokens.currentToken(); + const valueCode = this.tokens.code.slice(token.start + 1, token.end - 1); + const replacementCode = formatJSXTextReplacement(valueCode); + const literalCode = formatJSXStringValueLiteral(valueCode); + this.tokens.replaceToken(literalCode + replacementCode); + } + + /** + * Process the first part of a tag, before any props. + */ + processTagIntro() { + // Walk forward until we see one of these patterns: + // jsxName to start the first prop, preceded by another jsxName to end the tag name. + // jsxName to start the first prop, preceded by greaterThan to end the type argument. + // [open brace] to start the first prop. + // [jsxTagEnd] to end the open-tag. + // [slash, jsxTagEnd] to end the self-closing tag. + let introEnd = this.tokens.currentIndex() + 1; + while ( + this.tokens.tokens[introEnd].isType || + (!this.tokens.matches2AtIndex(introEnd - 1, _types.TokenType.jsxName, _types.TokenType.jsxName) && + !this.tokens.matches2AtIndex(introEnd - 1, _types.TokenType.greaterThan, _types.TokenType.jsxName) && + !this.tokens.matches1AtIndex(introEnd, _types.TokenType.braceL) && + !this.tokens.matches1AtIndex(introEnd, _types.TokenType.jsxTagEnd) && + !this.tokens.matches2AtIndex(introEnd, _types.TokenType.slash, _types.TokenType.jsxTagEnd)) + ) { + introEnd++; + } + if (introEnd === this.tokens.currentIndex() + 1) { + const tagName = this.tokens.identifierName(); + if (startsWithLowerCase(tagName)) { + this.tokens.replaceToken(`'${tagName}'`); + } + } + while (this.tokens.currentIndex() < introEnd) { + this.rootTransformer.processToken(); + } + } + + processChildren() { + while (true) { + if (this.tokens.matches2(_types.TokenType.jsxTagStart, _types.TokenType.slash)) { + // Closing tag, so no more children. 
+ return; + } + if (this.tokens.matches1(_types.TokenType.braceL)) { + if (this.tokens.matches2(_types.TokenType.braceL, _types.TokenType.braceR)) { + // Empty interpolations and comment-only interpolations are allowed + // and don't create an extra child arg. + this.tokens.replaceToken(""); + this.tokens.replaceToken(""); + } else { + // Interpolated expression. + this.tokens.replaceToken(", "); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + } + } else if (this.tokens.matches1(_types.TokenType.jsxTagStart)) { + // Child JSX element + this.tokens.appendCode(", "); + this.processJSXTag(); + } else if (this.tokens.matches1(_types.TokenType.jsxText)) { + this.processChildTextElement(); + } else { + throw new Error("Unexpected token when processing JSX children."); + } + } + } + + processChildTextElement() { + const token = this.tokens.currentToken(); + const valueCode = this.tokens.code.slice(token.start, token.end); + const replacementCode = formatJSXTextReplacement(valueCode); + const literalCode = formatJSXTextLiteral(valueCode); + if (literalCode === '""') { + this.tokens.replaceToken(replacementCode); + } else { + this.tokens.replaceToken(`, ${literalCode}${replacementCode}`); + } + } + + processJSXTag() { + const {jsxPragmaInfo} = this; + const resolvedPragmaBaseName = this.importProcessor + ? this.importProcessor.getIdentifierReplacement(jsxPragmaInfo.base) || jsxPragmaInfo.base + : jsxPragmaInfo.base; + const firstTokenStart = this.tokens.currentToken().start; + // First tag is always jsxTagStart. + this.tokens.replaceToken(`${resolvedPragmaBaseName}${jsxPragmaInfo.suffix}(`); + + if (this.tokens.matches1(_types.TokenType.jsxTagEnd)) { + // Fragment syntax. + const resolvedFragmentPragmaBaseName = this.importProcessor + ? this.importProcessor.getIdentifierReplacement(jsxPragmaInfo.fragmentBase) || + jsxPragmaInfo.fragmentBase + : jsxPragmaInfo.fragmentBase; + this.tokens.replaceToken( + `${resolvedFragmentPragmaBaseName}${jsxPragmaInfo.fragmentSuffix}, null`, + ); + // Tag with children. + this.processChildren(); + while (!this.tokens.matches1(_types.TokenType.jsxTagEnd)) { + this.tokens.replaceToken(""); + } + this.tokens.replaceToken(")"); + } else { + // Normal open tag or self-closing tag. + this.processTagIntro(); + this.processProps(firstTokenStart); + + if (this.tokens.matches2(_types.TokenType.slash, _types.TokenType.jsxTagEnd)) { + // Self-closing tag. + this.tokens.replaceToken(""); + this.tokens.replaceToken(")"); + } else if (this.tokens.matches1(_types.TokenType.jsxTagEnd)) { + this.tokens.replaceToken(""); + // Tag with children. + this.processChildren(); + while (!this.tokens.matches1(_types.TokenType.jsxTagEnd)) { + this.tokens.replaceToken(""); + } + this.tokens.replaceToken(")"); + } else { + throw new Error("Expected either /> or > at the end of the tag."); + } + } + } +} exports.default = JSXTransformer; + +/** + * Spec for identifiers: https://tc39.github.io/ecma262/#prod-IdentifierStart. + * + * Really only treat anything starting with a-z as tag names. `_`, `$`, `é` + * should be treated as copmonent names + */ + function startsWithLowerCase(s) { + const firstChar = s.charCodeAt(0); + return firstChar >= _charcodes.charCodes.lowercaseA && firstChar <= _charcodes.charCodes.lowercaseZ; +} exports.startsWithLowerCase = startsWithLowerCase; + +/** + * Turn the given jsxText string into a JS string literal. Leading and trailing + * whitespace on lines is removed, except immediately after the open-tag and + * before the close-tag. 
Empty lines are completely removed, and spaces are + * added between lines after that. + * + * We use JSON.stringify to introduce escape characters as necessary, and trim + * the start and end of each line and remove blank lines. + */ +function formatJSXTextLiteral(text) { + let result = ""; + let whitespace = ""; + + let isInInitialLineWhitespace = false; + let seenNonWhitespace = false; + for (let i = 0; i < text.length; i++) { + const c = text[i]; + if (c === " " || c === "\t" || c === "\r") { + if (!isInInitialLineWhitespace) { + whitespace += c; + } + } else if (c === "\n") { + whitespace = ""; + isInInitialLineWhitespace = true; + } else { + if (seenNonWhitespace && isInInitialLineWhitespace) { + result += " "; + } + result += whitespace; + whitespace = ""; + if (c === "&") { + const {entity, newI} = processEntity(text, i + 1); + i = newI - 1; + result += entity; + } else { + result += c; + } + seenNonWhitespace = true; + isInInitialLineWhitespace = false; + } + } + if (!isInInitialLineWhitespace) { + result += whitespace; + } + return JSON.stringify(result); +} + +/** + * Produce the code that should be printed after the JSX text string literal, + * with most content removed, but all newlines preserved and all spacing at the + * end preserved. + */ +function formatJSXTextReplacement(text) { + let numNewlines = 0; + let numSpaces = 0; + for (const c of text) { + if (c === "\n") { + numNewlines++; + numSpaces = 0; + } else if (c === " ") { + numSpaces++; + } + } + return "\n".repeat(numNewlines) + " ".repeat(numSpaces); +} + +/** + * Format a string in the value position of a JSX prop. + * + * Use the same implementation as convertAttribute from + * babel-helper-builder-react-jsx. + */ +function formatJSXStringValueLiteral(text) { + let result = ""; + for (let i = 0; i < text.length; i++) { + const c = text[i]; + if (c === "\n") { + if (/\s/.test(text[i + 1])) { + result += " "; + while (i < text.length && /\s/.test(text[i + 1])) { + i++; + } + } else { + result += "\n"; + } + } else if (c === "&") { + const {entity, newI} = processEntity(text, i + 1); + result += entity; + i = newI - 1; + } else { + result += c; + } + } + return JSON.stringify(result); +} + +/** + * Modified from jsxReadString in Babylon. 
+ */ +function processEntity(text, indexAfterAmpersand) { + let str = ""; + let count = 0; + let entity; + let i = indexAfterAmpersand; + + while (i < text.length && count++ < 10) { + const ch = text[i]; + i++; + if (ch === ";") { + if (str[0] === "#") { + if (str[1] === "x") { + str = str.substr(2); + if (HEX_NUMBER.test(str)) { + entity = String.fromCodePoint(parseInt(str, 16)); + } + } else { + str = str.substr(1); + if (DECIMAL_NUMBER.test(str)) { + entity = String.fromCodePoint(parseInt(str, 10)); + } + } + } else { + entity = _xhtml2.default[str]; + } + break; + } + str += ch; + } + if (!entity) { + return {entity: "&", newI: indexAfterAmpersand}; + } + return {entity, newI: i}; +} diff --git a/node_modules/sucrase/dist/transformers/JSXTransformer.mjs b/node_modules/sucrase/dist/transformers/JSXTransformer.mjs new file mode 100644 index 00000000..2bf8e007 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/JSXTransformer.mjs @@ -0,0 +1,398 @@ + + + +import XHTMLEntities from "../parser/plugins/jsx/xhtml"; +import {TokenType as tt} from "../parser/tokenizer/types"; +import {charCodes} from "../parser/util/charcodes"; + +import getJSXPragmaInfo, {} from "../util/getJSXPragmaInfo"; + +import Transformer from "./Transformer"; + +const HEX_NUMBER = /^[\da-fA-F]+$/; +const DECIMAL_NUMBER = /^\d+$/; + +export default class JSXTransformer extends Transformer { + __init() {this.lastLineNumber = 1} + __init2() {this.lastIndex = 0} + __init3() {this.filenameVarName = null} + + + constructor( + rootTransformer, + tokens, + importProcessor, + nameManager, + options, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.nameManager = nameManager;this.options = options;JSXTransformer.prototype.__init.call(this);JSXTransformer.prototype.__init2.call(this);JSXTransformer.prototype.__init3.call(this);; + this.jsxPragmaInfo = getJSXPragmaInfo(options); + } + + process() { + if (this.tokens.matches1(tt.jsxTagStart)) { + this.processJSXTag(); + return true; + } + return false; + } + + getPrefixCode() { + if (this.filenameVarName) { + return `const ${this.filenameVarName} = ${JSON.stringify(this.options.filePath || "")};`; + } else { + return ""; + } + } + + /** + * Lazily calculate line numbers to avoid unneeded work. We assume this is always called in + * increasing order by index. + */ + getLineNumberForIndex(index) { + const code = this.tokens.code; + while (this.lastIndex < index && this.lastIndex < code.length) { + if (code[this.lastIndex] === "\n") { + this.lastLineNumber++; + } + this.lastIndex++; + } + return this.lastLineNumber; + } + + getFilenameVarName() { + if (!this.filenameVarName) { + this.filenameVarName = this.nameManager.claimFreeName("_jsxFileName"); + } + return this.filenameVarName; + } + + processProps(firstTokenStart) { + const lineNumber = this.getLineNumberForIndex(firstTokenStart); + const devProps = this.options.production + ? 
"" + : `__self: this, __source: {fileName: ${this.getFilenameVarName()}, lineNumber: ${lineNumber}}`; + if (!this.tokens.matches1(tt.jsxName) && !this.tokens.matches1(tt.braceL)) { + if (devProps) { + this.tokens.appendCode(`, {${devProps}}`); + } else { + this.tokens.appendCode(`, null`); + } + return; + } + this.tokens.appendCode(`, {`); + while (true) { + if (this.tokens.matches2(tt.jsxName, tt.eq)) { + this.processPropKeyName(); + this.tokens.replaceToken(": "); + if (this.tokens.matches1(tt.braceL)) { + this.tokens.replaceToken(""); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + } else if (this.tokens.matches1(tt.jsxTagStart)) { + this.processJSXTag(); + } else { + this.processStringPropValue(); + } + } else if (this.tokens.matches1(tt.jsxName)) { + this.processPropKeyName(); + this.tokens.appendCode(": true"); + } else if (this.tokens.matches1(tt.braceL)) { + this.tokens.replaceToken(""); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + } else { + break; + } + this.tokens.appendCode(","); + } + if (devProps) { + this.tokens.appendCode(` ${devProps}}`); + } else { + this.tokens.appendCode("}"); + } + } + + processPropKeyName() { + const keyName = this.tokens.identifierName(); + if (keyName.includes("-")) { + this.tokens.replaceToken(`'${keyName}'`); + } else { + this.tokens.copyToken(); + } + } + + processStringPropValue() { + const token = this.tokens.currentToken(); + const valueCode = this.tokens.code.slice(token.start + 1, token.end - 1); + const replacementCode = formatJSXTextReplacement(valueCode); + const literalCode = formatJSXStringValueLiteral(valueCode); + this.tokens.replaceToken(literalCode + replacementCode); + } + + /** + * Process the first part of a tag, before any props. + */ + processTagIntro() { + // Walk forward until we see one of these patterns: + // jsxName to start the first prop, preceded by another jsxName to end the tag name. + // jsxName to start the first prop, preceded by greaterThan to end the type argument. + // [open brace] to start the first prop. + // [jsxTagEnd] to end the open-tag. + // [slash, jsxTagEnd] to end the self-closing tag. + let introEnd = this.tokens.currentIndex() + 1; + while ( + this.tokens.tokens[introEnd].isType || + (!this.tokens.matches2AtIndex(introEnd - 1, tt.jsxName, tt.jsxName) && + !this.tokens.matches2AtIndex(introEnd - 1, tt.greaterThan, tt.jsxName) && + !this.tokens.matches1AtIndex(introEnd, tt.braceL) && + !this.tokens.matches1AtIndex(introEnd, tt.jsxTagEnd) && + !this.tokens.matches2AtIndex(introEnd, tt.slash, tt.jsxTagEnd)) + ) { + introEnd++; + } + if (introEnd === this.tokens.currentIndex() + 1) { + const tagName = this.tokens.identifierName(); + if (startsWithLowerCase(tagName)) { + this.tokens.replaceToken(`'${tagName}'`); + } + } + while (this.tokens.currentIndex() < introEnd) { + this.rootTransformer.processToken(); + } + } + + processChildren() { + while (true) { + if (this.tokens.matches2(tt.jsxTagStart, tt.slash)) { + // Closing tag, so no more children. + return; + } + if (this.tokens.matches1(tt.braceL)) { + if (this.tokens.matches2(tt.braceL, tt.braceR)) { + // Empty interpolations and comment-only interpolations are allowed + // and don't create an extra child arg. + this.tokens.replaceToken(""); + this.tokens.replaceToken(""); + } else { + // Interpolated expression. 
+ this.tokens.replaceToken(", "); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + } + } else if (this.tokens.matches1(tt.jsxTagStart)) { + // Child JSX element + this.tokens.appendCode(", "); + this.processJSXTag(); + } else if (this.tokens.matches1(tt.jsxText)) { + this.processChildTextElement(); + } else { + throw new Error("Unexpected token when processing JSX children."); + } + } + } + + processChildTextElement() { + const token = this.tokens.currentToken(); + const valueCode = this.tokens.code.slice(token.start, token.end); + const replacementCode = formatJSXTextReplacement(valueCode); + const literalCode = formatJSXTextLiteral(valueCode); + if (literalCode === '""') { + this.tokens.replaceToken(replacementCode); + } else { + this.tokens.replaceToken(`, ${literalCode}${replacementCode}`); + } + } + + processJSXTag() { + const {jsxPragmaInfo} = this; + const resolvedPragmaBaseName = this.importProcessor + ? this.importProcessor.getIdentifierReplacement(jsxPragmaInfo.base) || jsxPragmaInfo.base + : jsxPragmaInfo.base; + const firstTokenStart = this.tokens.currentToken().start; + // First tag is always jsxTagStart. + this.tokens.replaceToken(`${resolvedPragmaBaseName}${jsxPragmaInfo.suffix}(`); + + if (this.tokens.matches1(tt.jsxTagEnd)) { + // Fragment syntax. + const resolvedFragmentPragmaBaseName = this.importProcessor + ? this.importProcessor.getIdentifierReplacement(jsxPragmaInfo.fragmentBase) || + jsxPragmaInfo.fragmentBase + : jsxPragmaInfo.fragmentBase; + this.tokens.replaceToken( + `${resolvedFragmentPragmaBaseName}${jsxPragmaInfo.fragmentSuffix}, null`, + ); + // Tag with children. + this.processChildren(); + while (!this.tokens.matches1(tt.jsxTagEnd)) { + this.tokens.replaceToken(""); + } + this.tokens.replaceToken(")"); + } else { + // Normal open tag or self-closing tag. + this.processTagIntro(); + this.processProps(firstTokenStart); + + if (this.tokens.matches2(tt.slash, tt.jsxTagEnd)) { + // Self-closing tag. + this.tokens.replaceToken(""); + this.tokens.replaceToken(")"); + } else if (this.tokens.matches1(tt.jsxTagEnd)) { + this.tokens.replaceToken(""); + // Tag with children. + this.processChildren(); + while (!this.tokens.matches1(tt.jsxTagEnd)) { + this.tokens.replaceToken(""); + } + this.tokens.replaceToken(")"); + } else { + throw new Error("Expected either /> or > at the end of the tag."); + } + } + } +} + +/** + * Spec for identifiers: https://tc39.github.io/ecma262/#prod-IdentifierStart. + * + * Really only treat anything starting with a-z as tag names. `_`, `$`, `é` + * should be treated as copmonent names + */ +export function startsWithLowerCase(s) { + const firstChar = s.charCodeAt(0); + return firstChar >= charCodes.lowercaseA && firstChar <= charCodes.lowercaseZ; +} + +/** + * Turn the given jsxText string into a JS string literal. Leading and trailing + * whitespace on lines is removed, except immediately after the open-tag and + * before the close-tag. Empty lines are completely removed, and spaces are + * added between lines after that. + * + * We use JSON.stringify to introduce escape characters as necessary, and trim + * the start and end of each line and remove blank lines. 
+ */ +function formatJSXTextLiteral(text) { + let result = ""; + let whitespace = ""; + + let isInInitialLineWhitespace = false; + let seenNonWhitespace = false; + for (let i = 0; i < text.length; i++) { + const c = text[i]; + if (c === " " || c === "\t" || c === "\r") { + if (!isInInitialLineWhitespace) { + whitespace += c; + } + } else if (c === "\n") { + whitespace = ""; + isInInitialLineWhitespace = true; + } else { + if (seenNonWhitespace && isInInitialLineWhitespace) { + result += " "; + } + result += whitespace; + whitespace = ""; + if (c === "&") { + const {entity, newI} = processEntity(text, i + 1); + i = newI - 1; + result += entity; + } else { + result += c; + } + seenNonWhitespace = true; + isInInitialLineWhitespace = false; + } + } + if (!isInInitialLineWhitespace) { + result += whitespace; + } + return JSON.stringify(result); +} + +/** + * Produce the code that should be printed after the JSX text string literal, + * with most content removed, but all newlines preserved and all spacing at the + * end preserved. + */ +function formatJSXTextReplacement(text) { + let numNewlines = 0; + let numSpaces = 0; + for (const c of text) { + if (c === "\n") { + numNewlines++; + numSpaces = 0; + } else if (c === " ") { + numSpaces++; + } + } + return "\n".repeat(numNewlines) + " ".repeat(numSpaces); +} + +/** + * Format a string in the value position of a JSX prop. + * + * Use the same implementation as convertAttribute from + * babel-helper-builder-react-jsx. + */ +function formatJSXStringValueLiteral(text) { + let result = ""; + for (let i = 0; i < text.length; i++) { + const c = text[i]; + if (c === "\n") { + if (/\s/.test(text[i + 1])) { + result += " "; + while (i < text.length && /\s/.test(text[i + 1])) { + i++; + } + } else { + result += "\n"; + } + } else if (c === "&") { + const {entity, newI} = processEntity(text, i + 1); + result += entity; + i = newI - 1; + } else { + result += c; + } + } + return JSON.stringify(result); +} + +/** + * Modified from jsxReadString in Babylon. 
+ */ +function processEntity(text, indexAfterAmpersand) { + let str = ""; + let count = 0; + let entity; + let i = indexAfterAmpersand; + + while (i < text.length && count++ < 10) { + const ch = text[i]; + i++; + if (ch === ";") { + if (str[0] === "#") { + if (str[1] === "x") { + str = str.substr(2); + if (HEX_NUMBER.test(str)) { + entity = String.fromCodePoint(parseInt(str, 16)); + } + } else { + str = str.substr(1); + if (DECIMAL_NUMBER.test(str)) { + entity = String.fromCodePoint(parseInt(str, 10)); + } + } + } else { + entity = XHTMLEntities[str]; + } + break; + } + str += ch; + } + if (!entity) { + return {entity: "&", newI: indexAfterAmpersand}; + } + return {entity, newI: i}; +} diff --git a/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.d.ts b/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.d.ts new file mode 100644 index 00000000..a7d47059 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.d.ts @@ -0,0 +1,7 @@ +import TokenProcessor from "../TokenProcessor"; +import Transformer from "./Transformer"; +export default class NumericSeparatorTransformer extends Transformer { + readonly tokens: TokenProcessor; + constructor(tokens: TokenProcessor); + process(): boolean; +} diff --git a/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.js b/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.js new file mode 100644 index 00000000..f91dde1d --- /dev/null +++ b/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.js @@ -0,0 +1,20 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _types = require('../parser/tokenizer/types'); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + + class NumericSeparatorTransformer extends _Transformer2.default { + constructor( tokens) { + super();this.tokens = tokens;; + } + + process() { + if (this.tokens.matches1(_types.TokenType.num)) { + const code = this.tokens.currentTokenCode(); + if (code.includes("_")) { + this.tokens.replaceToken(code.replace(/_/g, "")); + return true; + } + } + return false; + } +} exports.default = NumericSeparatorTransformer; diff --git a/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.mjs b/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.mjs new file mode 100644 index 00000000..0cb01a16 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.mjs @@ -0,0 +1,20 @@ +import {TokenType as tt} from "../parser/tokenizer/types"; + +import Transformer from "./Transformer"; + +export default class NumericSeparatorTransformer extends Transformer { + constructor( tokens) { + super();this.tokens = tokens;; + } + + process() { + if (this.tokens.matches1(tt.num)) { + const code = this.tokens.currentTokenCode(); + if (code.includes("_")) { + this.tokens.replaceToken(code.replace(/_/g, "")); + return true; + } + } + return false; + } +} diff --git a/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.d.ts b/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.d.ts new file mode 100644 index 00000000..23d835e1 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.d.ts @@ -0,0 +1,9 @@ +import NameManager from "../NameManager"; +import TokenProcessor from "../TokenProcessor"; +import Transformer from 
"./Transformer"; +export default class OptionalCatchBindingTransformer extends Transformer { + readonly tokens: TokenProcessor; + readonly nameManager: NameManager; + constructor(tokens: TokenProcessor, nameManager: NameManager); + process(): boolean; +} diff --git a/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.js b/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.js new file mode 100644 index 00000000..f4dc91db --- /dev/null +++ b/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.js @@ -0,0 +1,19 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true}); +var _types = require('../parser/tokenizer/types'); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + + class OptionalCatchBindingTransformer extends _Transformer2.default { + constructor( tokens, nameManager) { + super();this.tokens = tokens;this.nameManager = nameManager;; + } + + process() { + if (this.tokens.matches2(_types.TokenType._catch, _types.TokenType.braceL)) { + this.tokens.copyToken(); + this.tokens.appendCode(` (${this.nameManager.claimFreeName("e")})`); + return true; + } + return false; + } +} exports.default = OptionalCatchBindingTransformer; diff --git a/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.mjs b/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.mjs new file mode 100644 index 00000000..547273b4 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.mjs @@ -0,0 +1,19 @@ + +import {TokenType as tt} from "../parser/tokenizer/types"; + +import Transformer from "./Transformer"; + +export default class OptionalCatchBindingTransformer extends Transformer { + constructor( tokens, nameManager) { + super();this.tokens = tokens;this.nameManager = nameManager;; + } + + process() { + if (this.tokens.matches2(tt._catch, tt.braceL)) { + this.tokens.copyToken(); + this.tokens.appendCode(` (${this.nameManager.claimFreeName("e")})`); + return true; + } + return false; + } +} diff --git a/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.d.ts b/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.d.ts new file mode 100644 index 00000000..1c246394 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.d.ts @@ -0,0 +1,29 @@ +import CJSImportProcessor from "../CJSImportProcessor"; +import { Options } from "../index"; +import TokenProcessor from "../TokenProcessor"; +import RootTransformer from "./RootTransformer"; +import Transformer from "./Transformer"; +/** + * Implementation of babel-plugin-transform-react-display-name, which adds a + * display name to usages of React.createClass and createReactClass. + */ +export default class ReactDisplayNameTransformer extends Transformer { + readonly rootTransformer: RootTransformer; + readonly tokens: TokenProcessor; + readonly importProcessor: CJSImportProcessor | null; + readonly options: Options; + constructor(rootTransformer: RootTransformer, tokens: TokenProcessor, importProcessor: CJSImportProcessor | null, options: Options); + process(): boolean; + /** + * This is called with the token position at the open-paren. 
+ */ + private tryProcessCreateClassCall; + private findDisplayName; + private getDisplayNameFromFilename; + /** + * We only want to add a display name when this is a function call containing + * one argument, which is an object literal without `displayName` as an + * existing key. + */ + private classNeedsDisplayName; +} diff --git a/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.js b/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.js new file mode 100644 index 00000000..6b5cf80f --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.js @@ -0,0 +1,160 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true}); + +var _tokenizer = require('../parser/tokenizer'); +var _types = require('../parser/tokenizer/types'); + + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + +/** + * Implementation of babel-plugin-transform-react-display-name, which adds a + * display name to usages of React.createClass and createReactClass. + */ + class ReactDisplayNameTransformer extends _Transformer2.default { + constructor( + rootTransformer, + tokens, + importProcessor, + options, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.options = options;; + } + + process() { + const startIndex = this.tokens.currentIndex(); + if (this.tokens.identifierName() === "createReactClass") { + const newName = + this.importProcessor && this.importProcessor.getIdentifierReplacement("createReactClass"); + if (newName) { + this.tokens.replaceToken(`(0, ${newName})`); + } else { + this.tokens.copyToken(); + } + this.tryProcessCreateClassCall(startIndex); + return true; + } + if ( + this.tokens.matches3(_types.TokenType.name, _types.TokenType.dot, _types.TokenType.name) && + this.tokens.identifierName() === "React" && + this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 2) === "createClass" + ) { + const newName = this.importProcessor + ? this.importProcessor.getIdentifierReplacement("React") || "React" + : "React"; + if (newName) { + this.tokens.replaceToken(newName); + this.tokens.copyToken(); + this.tokens.copyToken(); + } else { + this.tokens.copyToken(); + this.tokens.copyToken(); + this.tokens.copyToken(); + } + this.tryProcessCreateClassCall(startIndex); + return true; + } + return false; + } + + /** + * This is called with the token position at the open-paren. + */ + tryProcessCreateClassCall(startIndex) { + const displayName = this.findDisplayName(startIndex); + if (!displayName) { + return; + } + + if (this.classNeedsDisplayName()) { + this.tokens.copyExpectedToken(_types.TokenType.parenL); + this.tokens.copyExpectedToken(_types.TokenType.braceL); + this.tokens.appendCode(`displayName: '${displayName}',`); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.braceR); + this.tokens.copyExpectedToken(_types.TokenType.parenR); + } + } + + findDisplayName(startIndex) { + if (startIndex < 2) { + return null; + } + if (this.tokens.matches2AtIndex(startIndex - 2, _types.TokenType.name, _types.TokenType.eq)) { + // This is an assignment (or declaration) and the LHS is either an identifier or a member + // expression ending in an identifier, so use that identifier name. 
+ return this.tokens.identifierNameAtIndex(startIndex - 2); + } + if ( + startIndex >= 2 && + this.tokens.tokens[startIndex - 2].identifierRole === _tokenizer.IdentifierRole.ObjectKey + ) { + // This is an object literal value. + return this.tokens.identifierNameAtIndex(startIndex - 2); + } + if (this.tokens.matches2AtIndex(startIndex - 2, _types.TokenType._export, _types.TokenType._default)) { + return this.getDisplayNameFromFilename(); + } + return null; + } + + getDisplayNameFromFilename() { + const filePath = this.options.filePath || "unknown"; + const pathSegments = filePath.split("/"); + const filename = pathSegments[pathSegments.length - 1]; + const dotIndex = filename.lastIndexOf("."); + const baseFilename = dotIndex === -1 ? filename : filename.slice(0, dotIndex); + if (baseFilename === "index" && pathSegments[pathSegments.length - 2]) { + return pathSegments[pathSegments.length - 2]; + } else { + return baseFilename; + } + } + + /** + * We only want to add a display name when this is a function call containing + * one argument, which is an object literal without `displayName` as an + * existing key. + */ + classNeedsDisplayName() { + let index = this.tokens.currentIndex(); + if (!this.tokens.matches2(_types.TokenType.parenL, _types.TokenType.braceL)) { + return false; + } + // The block starts on the {, and we expect any displayName key to be in + // that context. We need to ignore other other contexts to avoid matching + // nested displayName keys. + const objectStartIndex = index + 1; + const objectContextId = this.tokens.tokens[objectStartIndex].contextId; + if (objectContextId == null) { + throw new Error("Expected non-null context ID on object open-brace."); + } + + for (; index < this.tokens.tokens.length; index++) { + const token = this.tokens.tokens[index]; + if (token.type === _types.TokenType.braceR && token.contextId === objectContextId) { + index++; + break; + } + + if ( + this.tokens.identifierNameAtIndex(index) === "displayName" && + this.tokens.tokens[index].identifierRole === _tokenizer.IdentifierRole.ObjectKey && + token.contextId === objectContextId + ) { + // We found a displayName key, so bail out. + return false; + } + } + + if (index === this.tokens.tokens.length) { + throw new Error("Unexpected end of input when processing React class."); + } + + // If we got this far, we know we have createClass with an object with no + // display name, so we want to proceed as long as that was the only argument. + return ( + this.tokens.matches1AtIndex(index, _types.TokenType.parenR) || + this.tokens.matches2AtIndex(index, _types.TokenType.comma, _types.TokenType.parenR) + ); + } +} exports.default = ReactDisplayNameTransformer; diff --git a/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.mjs b/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.mjs new file mode 100644 index 00000000..0c44c818 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.mjs @@ -0,0 +1,160 @@ + + +import {IdentifierRole} from "../parser/tokenizer"; +import {TokenType as tt} from "../parser/tokenizer/types"; + + +import Transformer from "./Transformer"; + +/** + * Implementation of babel-plugin-transform-react-display-name, which adds a + * display name to usages of React.createClass and createReactClass. 
+ */ +export default class ReactDisplayNameTransformer extends Transformer { + constructor( + rootTransformer, + tokens, + importProcessor, + options, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.options = options;; + } + + process() { + const startIndex = this.tokens.currentIndex(); + if (this.tokens.identifierName() === "createReactClass") { + const newName = + this.importProcessor && this.importProcessor.getIdentifierReplacement("createReactClass"); + if (newName) { + this.tokens.replaceToken(`(0, ${newName})`); + } else { + this.tokens.copyToken(); + } + this.tryProcessCreateClassCall(startIndex); + return true; + } + if ( + this.tokens.matches3(tt.name, tt.dot, tt.name) && + this.tokens.identifierName() === "React" && + this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 2) === "createClass" + ) { + const newName = this.importProcessor + ? this.importProcessor.getIdentifierReplacement("React") || "React" + : "React"; + if (newName) { + this.tokens.replaceToken(newName); + this.tokens.copyToken(); + this.tokens.copyToken(); + } else { + this.tokens.copyToken(); + this.tokens.copyToken(); + this.tokens.copyToken(); + } + this.tryProcessCreateClassCall(startIndex); + return true; + } + return false; + } + + /** + * This is called with the token position at the open-paren. + */ + tryProcessCreateClassCall(startIndex) { + const displayName = this.findDisplayName(startIndex); + if (!displayName) { + return; + } + + if (this.classNeedsDisplayName()) { + this.tokens.copyExpectedToken(tt.parenL); + this.tokens.copyExpectedToken(tt.braceL); + this.tokens.appendCode(`displayName: '${displayName}',`); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.braceR); + this.tokens.copyExpectedToken(tt.parenR); + } + } + + findDisplayName(startIndex) { + if (startIndex < 2) { + return null; + } + if (this.tokens.matches2AtIndex(startIndex - 2, tt.name, tt.eq)) { + // This is an assignment (or declaration) and the LHS is either an identifier or a member + // expression ending in an identifier, so use that identifier name. + return this.tokens.identifierNameAtIndex(startIndex - 2); + } + if ( + startIndex >= 2 && + this.tokens.tokens[startIndex - 2].identifierRole === IdentifierRole.ObjectKey + ) { + // This is an object literal value. + return this.tokens.identifierNameAtIndex(startIndex - 2); + } + if (this.tokens.matches2AtIndex(startIndex - 2, tt._export, tt._default)) { + return this.getDisplayNameFromFilename(); + } + return null; + } + + getDisplayNameFromFilename() { + const filePath = this.options.filePath || "unknown"; + const pathSegments = filePath.split("/"); + const filename = pathSegments[pathSegments.length - 1]; + const dotIndex = filename.lastIndexOf("."); + const baseFilename = dotIndex === -1 ? filename : filename.slice(0, dotIndex); + if (baseFilename === "index" && pathSegments[pathSegments.length - 2]) { + return pathSegments[pathSegments.length - 2]; + } else { + return baseFilename; + } + } + + /** + * We only want to add a display name when this is a function call containing + * one argument, which is an object literal without `displayName` as an + * existing key. + */ + classNeedsDisplayName() { + let index = this.tokens.currentIndex(); + if (!this.tokens.matches2(tt.parenL, tt.braceL)) { + return false; + } + // The block starts on the {, and we expect any displayName key to be in + // that context. 
We need to ignore other other contexts to avoid matching + // nested displayName keys. + const objectStartIndex = index + 1; + const objectContextId = this.tokens.tokens[objectStartIndex].contextId; + if (objectContextId == null) { + throw new Error("Expected non-null context ID on object open-brace."); + } + + for (; index < this.tokens.tokens.length; index++) { + const token = this.tokens.tokens[index]; + if (token.type === tt.braceR && token.contextId === objectContextId) { + index++; + break; + } + + if ( + this.tokens.identifierNameAtIndex(index) === "displayName" && + this.tokens.tokens[index].identifierRole === IdentifierRole.ObjectKey && + token.contextId === objectContextId + ) { + // We found a displayName key, so bail out. + return false; + } + } + + if (index === this.tokens.tokens.length) { + throw new Error("Unexpected end of input when processing React class."); + } + + // If we got this far, we know we have createClass with an object with no + // display name, so we want to proceed as long as that was the only argument. + return ( + this.tokens.matches1AtIndex(index, tt.parenR) || + this.tokens.matches2AtIndex(index, tt.comma, tt.parenR) + ); + } +} diff --git a/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.d.ts b/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.d.ts new file mode 100644 index 00000000..42b3b5eb --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.d.ts @@ -0,0 +1,12 @@ +import TokenProcessor from "../TokenProcessor"; +import Transformer from "./Transformer"; +export default class ReactHotLoaderTransformer extends Transformer { + readonly tokens: TokenProcessor; + readonly filePath: string; + private extractedDefaultExportName; + constructor(tokens: TokenProcessor, filePath: string); + setExtractedDefaultExportName(extractedDefaultExportName: string): void; + getPrefixCode(): string; + getSuffixCode(): string; + process(): boolean; +} diff --git a/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.js b/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.js new file mode 100644 index 00000000..d35d0fa4 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.js @@ -0,0 +1,67 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _tokenizer = require('../parser/tokenizer'); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + + class ReactHotLoaderTransformer extends _Transformer2.default { + __init() {this.extractedDefaultExportName = null} + + constructor( tokens, filePath) { + super();this.tokens = tokens;this.filePath = filePath;ReactHotLoaderTransformer.prototype.__init.call(this);; + } + + setExtractedDefaultExportName(extractedDefaultExportName) { + this.extractedDefaultExportName = extractedDefaultExportName; + } + + getPrefixCode() { + return ` + (function () { + var enterModule = require('react-hot-loader').enterModule; + enterModule && enterModule(module); + })();` + .replace(/\s+/g, " ") + .trim(); + } + + getSuffixCode() { + const topLevelNames = new Set(); + for (const token of this.tokens.tokens) { + if ( + !token.isType && + _tokenizer.isTopLevelDeclaration.call(void 0, token) && + token.identifierRole !== _tokenizer.IdentifierRole.ImportDeclaration + ) { + topLevelNames.add(this.tokens.identifierNameForToken(token)); + } + } + const namesToRegister = Array.from(topLevelNames).map((name) => ({ + variableName: name, + uniqueLocalName: name, + })); + if (this.extractedDefaultExportName) { + namesToRegister.push({ + variableName: this.extractedDefaultExportName, + uniqueLocalName: "default", + }); + } + return ` +;(function () { + var reactHotLoader = require('react-hot-loader').default; + var leaveModule = require('react-hot-loader').leaveModule; + if (!reactHotLoader) { + return; + } +${namesToRegister + .map( + ({variableName, uniqueLocalName}) => + ` reactHotLoader.register(${variableName}, "${uniqueLocalName}", "${this.filePath}");`, + ) + .join("\n")} + leaveModule(module); +})();`; + } + + process() { + return false; + } +} exports.default = ReactHotLoaderTransformer; diff --git a/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.mjs b/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.mjs new file mode 100644 index 00000000..f408cb2a --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.mjs @@ -0,0 +1,67 @@ +import {IdentifierRole, isTopLevelDeclaration} from "../parser/tokenizer"; + +import Transformer from "./Transformer"; + +export default class ReactHotLoaderTransformer extends Transformer { + __init() {this.extractedDefaultExportName = null} + + constructor( tokens, filePath) { + super();this.tokens = tokens;this.filePath = filePath;ReactHotLoaderTransformer.prototype.__init.call(this);; + } + + setExtractedDefaultExportName(extractedDefaultExportName) { + this.extractedDefaultExportName = extractedDefaultExportName; + } + + getPrefixCode() { + return ` + (function () { + var enterModule = require('react-hot-loader').enterModule; + enterModule && enterModule(module); + })();` + .replace(/\s+/g, " ") + .trim(); + } + + getSuffixCode() { + const topLevelNames = new Set(); + for (const token of this.tokens.tokens) { + if ( + !token.isType && + isTopLevelDeclaration(token) && + token.identifierRole !== IdentifierRole.ImportDeclaration + ) { + topLevelNames.add(this.tokens.identifierNameForToken(token)); + } + } + const namesToRegister = Array.from(topLevelNames).map((name) => ({ + variableName: name, + uniqueLocalName: name, + })); + if (this.extractedDefaultExportName) { + namesToRegister.push({ + variableName: this.extractedDefaultExportName, + uniqueLocalName: "default", + }); + } + return 
` +;(function () { + var reactHotLoader = require('react-hot-loader').default; + var leaveModule = require('react-hot-loader').leaveModule; + if (!reactHotLoader) { + return; + } +${namesToRegister + .map( + ({variableName, uniqueLocalName}) => + ` reactHotLoader.register(${variableName}, "${uniqueLocalName}", "${this.filePath}");`, + ) + .join("\n")} + leaveModule(module); +})();`; + } + + process() { + return false; + } +} diff --git a/node_modules/sucrase/dist/transformers/RootTransformer.d.ts b/node_modules/sucrase/dist/transformers/RootTransformer.d.ts new file mode 100644 index 00000000..d93a1d4b --- /dev/null +++ b/node_modules/sucrase/dist/transformers/RootTransformer.d.ts @@ -0,0 +1,45 @@ +import { Options, SucraseContext, Transform } from "../index"; +import { ClassInfo } from "../util/getClassInfo"; +export default class RootTransformer { + private transformers; + private nameManager; + private tokens; + private generatedVariables; + private isImportsTransformEnabled; + private isReactHotLoaderTransformEnabled; + constructor(sucraseContext: SucraseContext, transforms: Array, enableLegacyBabel5ModuleInterop: boolean, options: Options); + transform(): string; + processBalancedCode(): void; + processToken(): void; + /** + * Skip past a class with a name and return that name. + */ + processNamedClass(): string; + processClass(): void; + /** + * We want to just handle class fields in all contexts, since TypeScript supports them. Later, + * when some JS implementations support class fields, this should be made optional. + */ + processClassBody(classInfo: ClassInfo, className: string | null): void; + makeConstructorInitCode(constructorInitializerStatements: Array, instanceInitializerNames: Array, className: string): string; + /** + * Normally it's ok to simply remove type tokens, but we need to be more careful when dealing with + * arrow function return types since they can confuse the parser. In that case, we want to move + * the close-paren to the same line as the arrow. + * + * See https://github.com/alangpierce/sucrase/issues/391 for more details. + */ + processPossibleArrowParamEnd(): boolean; + /** + * An async arrow function might be of the form: + * + * async < + * T + * >() => {} + * + * in which case, removing the type parameters will cause a syntax error. Detect this case and + * move the open-paren earlier. + */ + processPossibleAsyncArrowWithTypeParams(): boolean; + processPossibleTypeRange(): boolean; +} diff --git a/node_modules/sucrase/dist/transformers/RootTransformer.js b/node_modules/sucrase/dist/transformers/RootTransformer.js new file mode 100644 index 00000000..e9038b45 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/RootTransformer.js @@ -0,0 +1,397 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true}); + +var _keywords = require('../parser/tokenizer/keywords'); +var _types = require('../parser/tokenizer/types'); + +var _getClassInfo = require('../util/getClassInfo'); var _getClassInfo2 = _interopRequireDefault(_getClassInfo); +var _CJSImportTransformer = require('./CJSImportTransformer'); var _CJSImportTransformer2 = _interopRequireDefault(_CJSImportTransformer); +var _ESMImportTransformer = require('./ESMImportTransformer'); var _ESMImportTransformer2 = _interopRequireDefault(_ESMImportTransformer); +var _FlowTransformer = require('./FlowTransformer'); var _FlowTransformer2 = _interopRequireDefault(_FlowTransformer); +var _JSXTransformer = require('./JSXTransformer'); var _JSXTransformer2 = _interopRequireDefault(_JSXTransformer); +var _NumericSeparatorTransformer = require('./NumericSeparatorTransformer'); var _NumericSeparatorTransformer2 = _interopRequireDefault(_NumericSeparatorTransformer); +var _OptionalCatchBindingTransformer = require('./OptionalCatchBindingTransformer'); var _OptionalCatchBindingTransformer2 = _interopRequireDefault(_OptionalCatchBindingTransformer); +var _ReactDisplayNameTransformer = require('./ReactDisplayNameTransformer'); var _ReactDisplayNameTransformer2 = _interopRequireDefault(_ReactDisplayNameTransformer); +var _ReactHotLoaderTransformer = require('./ReactHotLoaderTransformer'); var _ReactHotLoaderTransformer2 = _interopRequireDefault(_ReactHotLoaderTransformer); + +var _TypeScriptTransformer = require('./TypeScriptTransformer'); var _TypeScriptTransformer2 = _interopRequireDefault(_TypeScriptTransformer); + + class RootTransformer { + __init() {this.transformers = []} + + + __init2() {this.generatedVariables = []} + + + + constructor( + sucraseContext, + transforms, + enableLegacyBabel5ModuleInterop, + options, + ) {;RootTransformer.prototype.__init.call(this);RootTransformer.prototype.__init2.call(this); + this.nameManager = sucraseContext.nameManager; + const {tokenProcessor, importProcessor} = sucraseContext; + this.tokens = tokenProcessor; + this.isImportsTransformEnabled = transforms.includes("imports"); + this.isReactHotLoaderTransformEnabled = transforms.includes("react-hot-loader"); + + this.transformers.push(new (0, _NumericSeparatorTransformer2.default)(tokenProcessor)); + this.transformers.push(new (0, _OptionalCatchBindingTransformer2.default)(tokenProcessor, this.nameManager)); + if (transforms.includes("jsx")) { + this.transformers.push( + new (0, _JSXTransformer2.default)(this, tokenProcessor, importProcessor, this.nameManager, options), + ); + this.transformers.push( + new (0, _ReactDisplayNameTransformer2.default)(this, tokenProcessor, importProcessor, options), + ); + } + + let reactHotLoaderTransformer = null; + if (transforms.includes("react-hot-loader")) { + if (!options.filePath) { + throw new Error("filePath is required when using the react-hot-loader transform."); + } + reactHotLoaderTransformer = new (0, _ReactHotLoaderTransformer2.default)(tokenProcessor, options.filePath); + this.transformers.push(reactHotLoaderTransformer); + } + + // Note that we always want to enable the imports transformer, even when the import transform + // itself isn't enabled, since we need to do type-only import pruning for both Flow and + // TypeScript. 
+ if (transforms.includes("imports")) { + if (importProcessor === null) { + throw new Error("Expected non-null importProcessor with imports transform enabled."); + } + this.transformers.push( + new (0, _CJSImportTransformer2.default)( + this, + tokenProcessor, + importProcessor, + this.nameManager, + reactHotLoaderTransformer, + enableLegacyBabel5ModuleInterop, + transforms.includes("typescript"), + ), + ); + } else { + this.transformers.push( + new (0, _ESMImportTransformer2.default)( + tokenProcessor, + this.nameManager, + reactHotLoaderTransformer, + transforms.includes("typescript"), + options, + ), + ); + } + + if (transforms.includes("flow")) { + this.transformers.push(new (0, _FlowTransformer2.default)(this, tokenProcessor)); + } + if (transforms.includes("typescript")) { + this.transformers.push( + new (0, _TypeScriptTransformer2.default)(this, tokenProcessor, transforms.includes("imports")), + ); + } + } + + transform() { + this.tokens.reset(); + this.processBalancedCode(); + const shouldAddUseStrict = this.isImportsTransformEnabled; + // "use strict" always needs to be first, so override the normal transformer order. + let prefix = shouldAddUseStrict ? '"use strict";' : ""; + for (const transformer of this.transformers) { + prefix += transformer.getPrefixCode(); + } + prefix += this.generatedVariables.map((v) => ` var ${v};`).join(""); + let suffix = ""; + for (const transformer of this.transformers) { + suffix += transformer.getSuffixCode(); + } + let code = this.tokens.finish(); + if (code.startsWith("#!")) { + let newlineIndex = code.indexOf("\n"); + if (newlineIndex === -1) { + newlineIndex = code.length; + code += "\n"; + } + return code.slice(0, newlineIndex + 1) + prefix + code.slice(newlineIndex + 1) + suffix; + } else { + return prefix + this.tokens.finish() + suffix; + } + } + + processBalancedCode() { + let braceDepth = 0; + let parenDepth = 0; + while (!this.tokens.isAtEnd()) { + if (this.tokens.matches1(_types.TokenType.braceL) || this.tokens.matches1(_types.TokenType.dollarBraceL)) { + braceDepth++; + } else if (this.tokens.matches1(_types.TokenType.braceR)) { + if (braceDepth === 0) { + return; + } + braceDepth--; + } + if (this.tokens.matches1(_types.TokenType.parenL)) { + parenDepth++; + } else if (this.tokens.matches1(_types.TokenType.parenR)) { + if (parenDepth === 0) { + return; + } + parenDepth--; + } + this.processToken(); + } + } + + processToken() { + if (this.tokens.matches1(_types.TokenType._class)) { + this.processClass(); + return; + } + for (const transformer of this.transformers) { + const wasProcessed = transformer.process(); + if (wasProcessed) { + return; + } + } + this.tokens.copyToken(); + } + + /** + * Skip past a class with a name and return that name. + */ + processNamedClass() { + if (!this.tokens.matches2(_types.TokenType._class, _types.TokenType.name)) { + throw new Error("Expected identifier for exported class name."); + } + const name = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + this.processClass(); + return name; + } + + processClass() { + const classInfo = _getClassInfo2.default.call(void 0, this, this.tokens, this.nameManager); + + // Both static and instance initializers need a class name to use to invoke the initializer, so + // assign to one if necessary. 
+ const needsCommaExpression = + classInfo.headerInfo.isExpression && + classInfo.staticInitializerNames.length + classInfo.instanceInitializerNames.length > 0; + + let className = classInfo.headerInfo.className; + if (needsCommaExpression) { + className = this.nameManager.claimFreeName("_class"); + this.generatedVariables.push(className); + this.tokens.appendCode(` (${className} =`); + } + + const classToken = this.tokens.currentToken(); + const contextId = classToken.contextId; + if (contextId == null) { + throw new Error("Expected class to have a context ID."); + } + this.tokens.copyExpectedToken(_types.TokenType._class); + while (!this.tokens.matchesContextIdAndLabel(_types.TokenType.braceL, contextId)) { + this.processToken(); + } + + this.processClassBody(classInfo, className); + + const staticInitializerStatements = classInfo.staticInitializerNames.map( + (name) => `${className}.${name}()`, + ); + if (needsCommaExpression) { + this.tokens.appendCode( + `, ${staticInitializerStatements.map((s) => `${s}, `).join("")}${className})`, + ); + } else if (classInfo.staticInitializerNames.length > 0) { + this.tokens.appendCode(` ${staticInitializerStatements.map((s) => `${s};`).join(" ")}`); + } + } + + /** + * We want to just handle class fields in all contexts, since TypeScript supports them. Later, + * when some JS implementations support class fields, this should be made optional. + */ + processClassBody(classInfo, className) { + const { + headerInfo, + constructorInsertPos, + constructorInitializerStatements, + fields, + instanceInitializerNames, + rangesToRemove, + } = classInfo; + let fieldIndex = 0; + let rangeToRemoveIndex = 0; + const classContextId = this.tokens.currentToken().contextId; + if (classContextId == null) { + throw new Error("Expected non-null context ID on class."); + } + this.tokens.copyExpectedToken(_types.TokenType.braceL); + if (this.isReactHotLoaderTransformEnabled) { + this.tokens.appendCode( + "__reactstandin__regenerateByEval(key, code) {this[key] = eval(code);}", + ); + } + + const needsConstructorInit = + constructorInitializerStatements.length + instanceInitializerNames.length > 0; + + if (constructorInsertPos === null && needsConstructorInit) { + const constructorInitializersCode = this.makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + ); + if (headerInfo.hasSuperclass) { + const argsName = this.nameManager.claimFreeName("args"); + this.tokens.appendCode( + `constructor(...${argsName}) { super(...${argsName}); ${constructorInitializersCode}; }`, + ); + } else { + this.tokens.appendCode(`constructor() { ${constructorInitializersCode}; }`); + } + } + + while (!this.tokens.matchesContextIdAndLabel(_types.TokenType.braceR, classContextId)) { + if (fieldIndex < fields.length && this.tokens.currentIndex() === fields[fieldIndex].start) { + let needsCloseBrace = false; + if (this.tokens.matches1(_types.TokenType.bracketL)) { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this`); + } else if (this.tokens.matches1(_types.TokenType.string) || this.tokens.matches1(_types.TokenType.num)) { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this[`); + needsCloseBrace = true; + } else { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this.`); + } + while (this.tokens.currentIndex() < fields[fieldIndex].end) { + if (needsCloseBrace && this.tokens.currentIndex() === fields[fieldIndex].equalsIndex) { + this.tokens.appendCode("]"); + } + 
this.processToken(); + } + this.tokens.appendCode("}"); + fieldIndex++; + } else if ( + rangeToRemoveIndex < rangesToRemove.length && + this.tokens.currentIndex() === rangesToRemove[rangeToRemoveIndex].start + ) { + this.tokens.removeInitialToken(); + while (this.tokens.currentIndex() < rangesToRemove[rangeToRemoveIndex].end) { + this.tokens.removeToken(); + } + rangeToRemoveIndex++; + } else if (this.tokens.currentIndex() === constructorInsertPos) { + this.tokens.copyToken(); + if (needsConstructorInit) { + this.tokens.appendCode( + `;${this.makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + )};`, + ); + } + this.processToken(); + } else { + this.processToken(); + } + } + this.tokens.copyExpectedToken(_types.TokenType.braceR); + } + + makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + ) { + return [ + ...constructorInitializerStatements, + ...instanceInitializerNames.map((name) => `${className}.prototype.${name}.call(this)`), + ].join(";"); + } + + /** + * Normally it's ok to simply remove type tokens, but we need to be more careful when dealing with + * arrow function return types since they can confuse the parser. In that case, we want to move + * the close-paren to the same line as the arrow. + * + * See https://github.com/alangpierce/sucrase/issues/391 for more details. + */ + processPossibleArrowParamEnd() { + if (this.tokens.matches2(_types.TokenType.parenR, _types.TokenType.colon) && this.tokens.tokenAtRelativeIndex(1).isType) { + let nextNonTypeIndex = this.tokens.currentIndex() + 1; + // Look ahead to see if this is an arrow function or something else. + while (this.tokens.tokens[nextNonTypeIndex].isType) { + nextNonTypeIndex++; + } + if (this.tokens.matches1AtIndex(nextNonTypeIndex, _types.TokenType.arrow)) { + this.tokens.removeInitialToken(); + while (this.tokens.currentIndex() < nextNonTypeIndex) { + this.tokens.removeToken(); + } + this.tokens.replaceTokenTrimmingLeftWhitespace(") =>"); + return true; + } + } + return false; + } + + /** + * An async arrow function might be of the form: + * + * async < + * T + * >() => {} + * + * in which case, removing the type parameters will cause a syntax error. Detect this case and + * move the open-paren earlier. + */ + processPossibleAsyncArrowWithTypeParams() { + if ( + !this.tokens.matchesContextual(_keywords.ContextualKeyword._async) && + !this.tokens.matches1(_types.TokenType._async) + ) { + return false; + } + const nextToken = this.tokens.tokenAtRelativeIndex(1); + if (nextToken.type !== _types.TokenType.lessThan || !nextToken.isType) { + return false; + } + + let nextNonTypeIndex = this.tokens.currentIndex() + 1; + // Look ahead to see if this is an arrow function or something else. + while (this.tokens.tokens[nextNonTypeIndex].isType) { + nextNonTypeIndex++; + } + if (this.tokens.matches1AtIndex(nextNonTypeIndex, _types.TokenType.parenL)) { + this.tokens.replaceToken("async ("); + this.tokens.removeInitialToken(); + while (this.tokens.currentIndex() < nextNonTypeIndex) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + // We ate a ( token, so we need to process the tokens in between and then the ) token so that + // we remain balanced. 
+ this.processBalancedCode(); + this.processToken(); + return true; + } + return false; + } + + processPossibleTypeRange() { + if (this.tokens.currentToken().isType) { + this.tokens.removeInitialToken(); + while (this.tokens.currentToken().isType) { + this.tokens.removeToken(); + } + return true; + } + return false; + } +} exports.default = RootTransformer; diff --git a/node_modules/sucrase/dist/transformers/RootTransformer.mjs b/node_modules/sucrase/dist/transformers/RootTransformer.mjs new file mode 100644 index 00000000..04355f58 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/RootTransformer.mjs @@ -0,0 +1,397 @@ + + +import {ContextualKeyword} from "../parser/tokenizer/keywords"; +import {TokenType as tt} from "../parser/tokenizer/types"; + +import getClassInfo, {} from "../util/getClassInfo"; +import CJSImportTransformer from "./CJSImportTransformer"; +import ESMImportTransformer from "./ESMImportTransformer"; +import FlowTransformer from "./FlowTransformer"; +import JSXTransformer from "./JSXTransformer"; +import NumericSeparatorTransformer from "./NumericSeparatorTransformer"; +import OptionalCatchBindingTransformer from "./OptionalCatchBindingTransformer"; +import ReactDisplayNameTransformer from "./ReactDisplayNameTransformer"; +import ReactHotLoaderTransformer from "./ReactHotLoaderTransformer"; + +import TypeScriptTransformer from "./TypeScriptTransformer"; + +export default class RootTransformer { + __init() {this.transformers = []} + + + __init2() {this.generatedVariables = []} + + + + constructor( + sucraseContext, + transforms, + enableLegacyBabel5ModuleInterop, + options, + ) {;RootTransformer.prototype.__init.call(this);RootTransformer.prototype.__init2.call(this); + this.nameManager = sucraseContext.nameManager; + const {tokenProcessor, importProcessor} = sucraseContext; + this.tokens = tokenProcessor; + this.isImportsTransformEnabled = transforms.includes("imports"); + this.isReactHotLoaderTransformEnabled = transforms.includes("react-hot-loader"); + + this.transformers.push(new NumericSeparatorTransformer(tokenProcessor)); + this.transformers.push(new OptionalCatchBindingTransformer(tokenProcessor, this.nameManager)); + if (transforms.includes("jsx")) { + this.transformers.push( + new JSXTransformer(this, tokenProcessor, importProcessor, this.nameManager, options), + ); + this.transformers.push( + new ReactDisplayNameTransformer(this, tokenProcessor, importProcessor, options), + ); + } + + let reactHotLoaderTransformer = null; + if (transforms.includes("react-hot-loader")) { + if (!options.filePath) { + throw new Error("filePath is required when using the react-hot-loader transform."); + } + reactHotLoaderTransformer = new ReactHotLoaderTransformer(tokenProcessor, options.filePath); + this.transformers.push(reactHotLoaderTransformer); + } + + // Note that we always want to enable the imports transformer, even when the import transform + // itself isn't enabled, since we need to do type-only import pruning for both Flow and + // TypeScript. 
+ if (transforms.includes("imports")) { + if (importProcessor === null) { + throw new Error("Expected non-null importProcessor with imports transform enabled."); + } + this.transformers.push( + new CJSImportTransformer( + this, + tokenProcessor, + importProcessor, + this.nameManager, + reactHotLoaderTransformer, + enableLegacyBabel5ModuleInterop, + transforms.includes("typescript"), + ), + ); + } else { + this.transformers.push( + new ESMImportTransformer( + tokenProcessor, + this.nameManager, + reactHotLoaderTransformer, + transforms.includes("typescript"), + options, + ), + ); + } + + if (transforms.includes("flow")) { + this.transformers.push(new FlowTransformer(this, tokenProcessor)); + } + if (transforms.includes("typescript")) { + this.transformers.push( + new TypeScriptTransformer(this, tokenProcessor, transforms.includes("imports")), + ); + } + } + + transform() { + this.tokens.reset(); + this.processBalancedCode(); + const shouldAddUseStrict = this.isImportsTransformEnabled; + // "use strict" always needs to be first, so override the normal transformer order. + let prefix = shouldAddUseStrict ? '"use strict";' : ""; + for (const transformer of this.transformers) { + prefix += transformer.getPrefixCode(); + } + prefix += this.generatedVariables.map((v) => ` var ${v};`).join(""); + let suffix = ""; + for (const transformer of this.transformers) { + suffix += transformer.getSuffixCode(); + } + let code = this.tokens.finish(); + if (code.startsWith("#!")) { + let newlineIndex = code.indexOf("\n"); + if (newlineIndex === -1) { + newlineIndex = code.length; + code += "\n"; + } + return code.slice(0, newlineIndex + 1) + prefix + code.slice(newlineIndex + 1) + suffix; + } else { + return prefix + this.tokens.finish() + suffix; + } + } + + processBalancedCode() { + let braceDepth = 0; + let parenDepth = 0; + while (!this.tokens.isAtEnd()) { + if (this.tokens.matches1(tt.braceL) || this.tokens.matches1(tt.dollarBraceL)) { + braceDepth++; + } else if (this.tokens.matches1(tt.braceR)) { + if (braceDepth === 0) { + return; + } + braceDepth--; + } + if (this.tokens.matches1(tt.parenL)) { + parenDepth++; + } else if (this.tokens.matches1(tt.parenR)) { + if (parenDepth === 0) { + return; + } + parenDepth--; + } + this.processToken(); + } + } + + processToken() { + if (this.tokens.matches1(tt._class)) { + this.processClass(); + return; + } + for (const transformer of this.transformers) { + const wasProcessed = transformer.process(); + if (wasProcessed) { + return; + } + } + this.tokens.copyToken(); + } + + /** + * Skip past a class with a name and return that name. + */ + processNamedClass() { + if (!this.tokens.matches2(tt._class, tt.name)) { + throw new Error("Expected identifier for exported class name."); + } + const name = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + this.processClass(); + return name; + } + + processClass() { + const classInfo = getClassInfo(this, this.tokens, this.nameManager); + + // Both static and instance initializers need a class name to use to invoke the initializer, so + // assign to one if necessary. 
+ const needsCommaExpression = + classInfo.headerInfo.isExpression && + classInfo.staticInitializerNames.length + classInfo.instanceInitializerNames.length > 0; + + let className = classInfo.headerInfo.className; + if (needsCommaExpression) { + className = this.nameManager.claimFreeName("_class"); + this.generatedVariables.push(className); + this.tokens.appendCode(` (${className} =`); + } + + const classToken = this.tokens.currentToken(); + const contextId = classToken.contextId; + if (contextId == null) { + throw new Error("Expected class to have a context ID."); + } + this.tokens.copyExpectedToken(tt._class); + while (!this.tokens.matchesContextIdAndLabel(tt.braceL, contextId)) { + this.processToken(); + } + + this.processClassBody(classInfo, className); + + const staticInitializerStatements = classInfo.staticInitializerNames.map( + (name) => `${className}.${name}()`, + ); + if (needsCommaExpression) { + this.tokens.appendCode( + `, ${staticInitializerStatements.map((s) => `${s}, `).join("")}${className})`, + ); + } else if (classInfo.staticInitializerNames.length > 0) { + this.tokens.appendCode(` ${staticInitializerStatements.map((s) => `${s};`).join(" ")}`); + } + } + + /** + * We want to just handle class fields in all contexts, since TypeScript supports them. Later, + * when some JS implementations support class fields, this should be made optional. + */ + processClassBody(classInfo, className) { + const { + headerInfo, + constructorInsertPos, + constructorInitializerStatements, + fields, + instanceInitializerNames, + rangesToRemove, + } = classInfo; + let fieldIndex = 0; + let rangeToRemoveIndex = 0; + const classContextId = this.tokens.currentToken().contextId; + if (classContextId == null) { + throw new Error("Expected non-null context ID on class."); + } + this.tokens.copyExpectedToken(tt.braceL); + if (this.isReactHotLoaderTransformEnabled) { + this.tokens.appendCode( + "__reactstandin__regenerateByEval(key, code) {this[key] = eval(code);}", + ); + } + + const needsConstructorInit = + constructorInitializerStatements.length + instanceInitializerNames.length > 0; + + if (constructorInsertPos === null && needsConstructorInit) { + const constructorInitializersCode = this.makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + ); + if (headerInfo.hasSuperclass) { + const argsName = this.nameManager.claimFreeName("args"); + this.tokens.appendCode( + `constructor(...${argsName}) { super(...${argsName}); ${constructorInitializersCode}; }`, + ); + } else { + this.tokens.appendCode(`constructor() { ${constructorInitializersCode}; }`); + } + } + + while (!this.tokens.matchesContextIdAndLabel(tt.braceR, classContextId)) { + if (fieldIndex < fields.length && this.tokens.currentIndex() === fields[fieldIndex].start) { + let needsCloseBrace = false; + if (this.tokens.matches1(tt.bracketL)) { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this`); + } else if (this.tokens.matches1(tt.string) || this.tokens.matches1(tt.num)) { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this[`); + needsCloseBrace = true; + } else { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this.`); + } + while (this.tokens.currentIndex() < fields[fieldIndex].end) { + if (needsCloseBrace && this.tokens.currentIndex() === fields[fieldIndex].equalsIndex) { + this.tokens.appendCode("]"); + } + this.processToken(); + } + this.tokens.appendCode("}"); + fieldIndex++; + } else if ( + 
rangeToRemoveIndex < rangesToRemove.length && + this.tokens.currentIndex() === rangesToRemove[rangeToRemoveIndex].start + ) { + this.tokens.removeInitialToken(); + while (this.tokens.currentIndex() < rangesToRemove[rangeToRemoveIndex].end) { + this.tokens.removeToken(); + } + rangeToRemoveIndex++; + } else if (this.tokens.currentIndex() === constructorInsertPos) { + this.tokens.copyToken(); + if (needsConstructorInit) { + this.tokens.appendCode( + `;${this.makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + )};`, + ); + } + this.processToken(); + } else { + this.processToken(); + } + } + this.tokens.copyExpectedToken(tt.braceR); + } + + makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + ) { + return [ + ...constructorInitializerStatements, + ...instanceInitializerNames.map((name) => `${className}.prototype.${name}.call(this)`), + ].join(";"); + } + + /** + * Normally it's ok to simply remove type tokens, but we need to be more careful when dealing with + * arrow function return types since they can confuse the parser. In that case, we want to move + * the close-paren to the same line as the arrow. + * + * See https://github.com/alangpierce/sucrase/issues/391 for more details. + */ + processPossibleArrowParamEnd() { + if (this.tokens.matches2(tt.parenR, tt.colon) && this.tokens.tokenAtRelativeIndex(1).isType) { + let nextNonTypeIndex = this.tokens.currentIndex() + 1; + // Look ahead to see if this is an arrow function or something else. + while (this.tokens.tokens[nextNonTypeIndex].isType) { + nextNonTypeIndex++; + } + if (this.tokens.matches1AtIndex(nextNonTypeIndex, tt.arrow)) { + this.tokens.removeInitialToken(); + while (this.tokens.currentIndex() < nextNonTypeIndex) { + this.tokens.removeToken(); + } + this.tokens.replaceTokenTrimmingLeftWhitespace(") =>"); + return true; + } + } + return false; + } + + /** + * An async arrow function might be of the form: + * + * async < + * T + * >() => {} + * + * in which case, removing the type parameters will cause a syntax error. Detect this case and + * move the open-paren earlier. + */ + processPossibleAsyncArrowWithTypeParams() { + if ( + !this.tokens.matchesContextual(ContextualKeyword._async) && + !this.tokens.matches1(tt._async) + ) { + return false; + } + const nextToken = this.tokens.tokenAtRelativeIndex(1); + if (nextToken.type !== tt.lessThan || !nextToken.isType) { + return false; + } + + let nextNonTypeIndex = this.tokens.currentIndex() + 1; + // Look ahead to see if this is an arrow function or something else. + while (this.tokens.tokens[nextNonTypeIndex].isType) { + nextNonTypeIndex++; + } + if (this.tokens.matches1AtIndex(nextNonTypeIndex, tt.parenL)) { + this.tokens.replaceToken("async ("); + this.tokens.removeInitialToken(); + while (this.tokens.currentIndex() < nextNonTypeIndex) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + // We ate a ( token, so we need to process the tokens in between and then the ) token so that + // we remain balanced. 
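For illustration only (not part of the patched file): the net effect of this branch on input such as

    const f = async <
      T
    >(value: T) => value;

is roughly

    const f = async (value) => value;

The `async` token is emitted as `async (`, the type-parameter tokens are dropped, and the original open-paren is consumed so the parentheses stay balanced.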
+ this.processBalancedCode(); + this.processToken(); + return true; + } + return false; + } + + processPossibleTypeRange() { + if (this.tokens.currentToken().isType) { + this.tokens.removeInitialToken(); + while (this.tokens.currentToken().isType) { + this.tokens.removeToken(); + } + return true; + } + return false; + } +} diff --git a/node_modules/sucrase/dist/transformers/Transformer.d.ts b/node_modules/sucrase/dist/transformers/Transformer.d.ts new file mode 100644 index 00000000..4107fc84 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/Transformer.d.ts @@ -0,0 +1,5 @@ +export default abstract class Transformer { + abstract process(): boolean; + getPrefixCode(): string; + getSuffixCode(): string; +} diff --git a/node_modules/sucrase/dist/transformers/Transformer.js b/node_modules/sucrase/dist/transformers/Transformer.js new file mode 100644 index 00000000..1d8fda0f --- /dev/null +++ b/node_modules/sucrase/dist/transformers/Transformer.js @@ -0,0 +1,12 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); class Transformer { + // Return true if anything was processed, false otherwise. + + + getPrefixCode() { + return ""; + } + + getSuffixCode() { + return ""; + } +} exports.default = Transformer; diff --git a/node_modules/sucrase/dist/transformers/Transformer.mjs b/node_modules/sucrase/dist/transformers/Transformer.mjs new file mode 100644 index 00000000..5127bef5 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/Transformer.mjs @@ -0,0 +1,12 @@ +export default class Transformer { + // Return true if anything was processed, false otherwise. + + + getPrefixCode() { + return ""; + } + + getSuffixCode() { + return ""; + } +} diff --git a/node_modules/sucrase/dist/transformers/TypeScriptTransformer.d.ts b/node_modules/sucrase/dist/transformers/TypeScriptTransformer.d.ts new file mode 100644 index 00000000..d48747b4 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/TypeScriptTransformer.d.ts @@ -0,0 +1,17 @@ +import TokenProcessor from "../TokenProcessor"; +import RootTransformer from "./RootTransformer"; +import Transformer from "./Transformer"; +export default class TypeScriptTransformer extends Transformer { + readonly rootTransformer: RootTransformer; + readonly tokens: TokenProcessor; + readonly isImportsTransformEnabled: boolean; + constructor(rootTransformer: RootTransformer, tokens: TokenProcessor, isImportsTransformEnabled: boolean); + process(): boolean; + processEnum(isExport?: boolean): void; + /** + * Rather than try to compute the actual enum values at compile time, we just create variables for + * each one and let everything evaluate at runtime. There's some additional complexity due to + * handling string literal names, including ones that happen to be valid identifiers. + */ + processEnumBody(enumName: string): void; +} diff --git a/node_modules/sucrase/dist/transformers/TypeScriptTransformer.js b/node_modules/sucrase/dist/transformers/TypeScriptTransformer.js new file mode 100644 index 00000000..0ddb38d1 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/TypeScriptTransformer.js @@ -0,0 +1,156 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _types = require('../parser/tokenizer/types'); + +var _isIdentifier = require('../util/isIdentifier'); var _isIdentifier2 = _interopRequireDefault(_isIdentifier); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + + class TypeScriptTransformer extends _Transformer2.default { + constructor( + rootTransformer, + tokens, + isImportsTransformEnabled, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.isImportsTransformEnabled = isImportsTransformEnabled;; + } + + process() { + if ( + this.rootTransformer.processPossibleArrowParamEnd() || + this.rootTransformer.processPossibleAsyncArrowWithTypeParams() || + this.rootTransformer.processPossibleTypeRange() + ) { + return true; + } + if ( + this.tokens.matches1(_types.TokenType._public) || + this.tokens.matches1(_types.TokenType._protected) || + this.tokens.matches1(_types.TokenType._private) || + this.tokens.matches1(_types.TokenType._abstract) || + this.tokens.matches1(_types.TokenType._readonly) || + this.tokens.matches1(_types.TokenType.nonNullAssertion) + ) { + this.tokens.removeInitialToken(); + return true; + } + if (this.tokens.matches1(_types.TokenType._enum) || this.tokens.matches2(_types.TokenType._const, _types.TokenType._enum)) { + this.processEnum(); + return true; + } + if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType._enum) || + this.tokens.matches3(_types.TokenType._export, _types.TokenType._const, _types.TokenType._enum) + ) { + this.processEnum(true); + return true; + } + return false; + } + + processEnum(isExport = false) { + // We might have "export const enum", so just remove all relevant tokens. + this.tokens.removeInitialToken(); + while (this.tokens.matches1(_types.TokenType._const) || this.tokens.matches1(_types.TokenType._enum)) { + this.tokens.removeToken(); + } + const enumName = this.tokens.identifierName(); + this.tokens.removeToken(); + if (isExport && !this.isImportsTransformEnabled) { + this.tokens.appendCode("export "); + } + this.tokens.appendCode(`var ${enumName}; (function (${enumName})`); + this.tokens.copyExpectedToken(_types.TokenType.braceL); + this.processEnumBody(enumName); + this.tokens.copyExpectedToken(_types.TokenType.braceR); + if (isExport && this.isImportsTransformEnabled) { + this.tokens.appendCode(`)(${enumName} || (exports.${enumName} = ${enumName} = {}));`); + } else { + this.tokens.appendCode(`)(${enumName} || (${enumName} = {}));`); + } + } + + /** + * Rather than try to compute the actual enum values at compile time, we just create variables for + * each one and let everything evaluate at runtime. There's some additional complexity due to + * handling string literal names, including ones that happen to be valid identifiers. 
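As a rough sketch of the output this strategy produces (illustration only, not text from the file), an input like `enum Direction { Up, Down }` comes out approximately as:

    var Direction; (function (Direction) {
      const Up = 0; Direction[Direction["Up"] = Up] = "Up";
      const Down = Up + 1; Direction[Direction["Down"] = Down] = "Down";
    })(Direction || (Direction = {}));

When the enum is exported and the imports transform is enabled, the closing call also assigns `exports.Direction`.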
+ */ + processEnumBody(enumName) { + let isPreviousValidIdentifier = false; + let lastValueReference = null; + while (true) { + if (this.tokens.matches1(_types.TokenType.braceR)) { + break; + } + const nameToken = this.tokens.currentToken(); + let name; + let nameStringCode; + if (nameToken.type === _types.TokenType.name) { + name = this.tokens.identifierNameForToken(nameToken); + nameStringCode = `"${name}"`; + } else if (nameToken.type === _types.TokenType.string) { + name = this.tokens.stringValueForToken(nameToken); + nameStringCode = this.tokens.code.slice(nameToken.start, nameToken.end); + } else { + throw new Error("Expected name or string at beginning of enum element."); + } + const isValidIdentifier = _isIdentifier2.default.call(void 0, name); + this.tokens.removeInitialToken(); + + let valueIsString; + let valueCode; + + if (this.tokens.matches1(_types.TokenType.eq)) { + const rhsEndIndex = this.tokens.currentToken().rhsEndIndex; + if (rhsEndIndex == null) { + throw new Error("Expected rhsEndIndex on enum assign."); + } + this.tokens.removeToken(); + if ( + this.tokens.matches2(_types.TokenType.string, _types.TokenType.comma) || + this.tokens.matches2(_types.TokenType.string, _types.TokenType.braceR) + ) { + valueIsString = true; + } + const startToken = this.tokens.currentToken(); + while (this.tokens.currentIndex() < rhsEndIndex) { + this.tokens.removeToken(); + } + valueCode = this.tokens.code.slice( + startToken.start, + this.tokens.tokenAtRelativeIndex(-1).end, + ); + } else { + valueIsString = false; + if (lastValueReference != null) { + if (isPreviousValidIdentifier) { + valueCode = `${lastValueReference} + 1`; + } else { + valueCode = `(${lastValueReference}) + 1`; + } + } else { + valueCode = "0"; + } + } + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } + + let valueReference; + if (isValidIdentifier) { + this.tokens.appendCode(`const ${name} = ${valueCode}; `); + valueReference = name; + } else { + valueReference = valueCode; + } + + if (valueIsString) { + this.tokens.appendCode(`${enumName}[${nameStringCode}] = ${valueReference};`); + } else { + this.tokens.appendCode( + `${enumName}[${enumName}[${nameStringCode}] = ${valueReference}] = ${nameStringCode};`, + ); + } + lastValueReference = valueReference; + isPreviousValidIdentifier = isValidIdentifier; + } + } +} exports.default = TypeScriptTransformer; diff --git a/node_modules/sucrase/dist/transformers/TypeScriptTransformer.mjs b/node_modules/sucrase/dist/transformers/TypeScriptTransformer.mjs new file mode 100644 index 00000000..5972536b --- /dev/null +++ b/node_modules/sucrase/dist/transformers/TypeScriptTransformer.mjs @@ -0,0 +1,156 @@ +import {TokenType as tt} from "../parser/tokenizer/types"; + +import isIdentifier from "../util/isIdentifier"; + +import Transformer from "./Transformer"; + +export default class TypeScriptTransformer extends Transformer { + constructor( + rootTransformer, + tokens, + isImportsTransformEnabled, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.isImportsTransformEnabled = isImportsTransformEnabled;; + } + + process() { + if ( + this.rootTransformer.processPossibleArrowParamEnd() || + this.rootTransformer.processPossibleAsyncArrowWithTypeParams() || + this.rootTransformer.processPossibleTypeRange() + ) { + return true; + } + if ( + this.tokens.matches1(tt._public) || + this.tokens.matches1(tt._protected) || + this.tokens.matches1(tt._private) || + this.tokens.matches1(tt._abstract) || + 
this.tokens.matches1(tt._readonly) || + this.tokens.matches1(tt.nonNullAssertion) + ) { + this.tokens.removeInitialToken(); + return true; + } + if (this.tokens.matches1(tt._enum) || this.tokens.matches2(tt._const, tt._enum)) { + this.processEnum(); + return true; + } + if ( + this.tokens.matches2(tt._export, tt._enum) || + this.tokens.matches3(tt._export, tt._const, tt._enum) + ) { + this.processEnum(true); + return true; + } + return false; + } + + processEnum(isExport = false) { + // We might have "export const enum", so just remove all relevant tokens. + this.tokens.removeInitialToken(); + while (this.tokens.matches1(tt._const) || this.tokens.matches1(tt._enum)) { + this.tokens.removeToken(); + } + const enumName = this.tokens.identifierName(); + this.tokens.removeToken(); + if (isExport && !this.isImportsTransformEnabled) { + this.tokens.appendCode("export "); + } + this.tokens.appendCode(`var ${enumName}; (function (${enumName})`); + this.tokens.copyExpectedToken(tt.braceL); + this.processEnumBody(enumName); + this.tokens.copyExpectedToken(tt.braceR); + if (isExport && this.isImportsTransformEnabled) { + this.tokens.appendCode(`)(${enumName} || (exports.${enumName} = ${enumName} = {}));`); + } else { + this.tokens.appendCode(`)(${enumName} || (${enumName} = {}));`); + } + } + + /** + * Rather than try to compute the actual enum values at compile time, we just create variables for + * each one and let everything evaluate at runtime. There's some additional complexity due to + * handling string literal names, including ones that happen to be valid identifiers. + */ + processEnumBody(enumName) { + let isPreviousValidIdentifier = false; + let lastValueReference = null; + while (true) { + if (this.tokens.matches1(tt.braceR)) { + break; + } + const nameToken = this.tokens.currentToken(); + let name; + let nameStringCode; + if (nameToken.type === tt.name) { + name = this.tokens.identifierNameForToken(nameToken); + nameStringCode = `"${name}"`; + } else if (nameToken.type === tt.string) { + name = this.tokens.stringValueForToken(nameToken); + nameStringCode = this.tokens.code.slice(nameToken.start, nameToken.end); + } else { + throw new Error("Expected name or string at beginning of enum element."); + } + const isValidIdentifier = isIdentifier(name); + this.tokens.removeInitialToken(); + + let valueIsString; + let valueCode; + + if (this.tokens.matches1(tt.eq)) { + const rhsEndIndex = this.tokens.currentToken().rhsEndIndex; + if (rhsEndIndex == null) { + throw new Error("Expected rhsEndIndex on enum assign."); + } + this.tokens.removeToken(); + if ( + this.tokens.matches2(tt.string, tt.comma) || + this.tokens.matches2(tt.string, tt.braceR) + ) { + valueIsString = true; + } + const startToken = this.tokens.currentToken(); + while (this.tokens.currentIndex() < rhsEndIndex) { + this.tokens.removeToken(); + } + valueCode = this.tokens.code.slice( + startToken.start, + this.tokens.tokenAtRelativeIndex(-1).end, + ); + } else { + valueIsString = false; + if (lastValueReference != null) { + if (isPreviousValidIdentifier) { + valueCode = `${lastValueReference} + 1`; + } else { + valueCode = `(${lastValueReference}) + 1`; + } + } else { + valueCode = "0"; + } + } + if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } + + let valueReference; + if (isValidIdentifier) { + this.tokens.appendCode(`const ${name} = ${valueCode}; `); + valueReference = name; + } else { + valueReference = valueCode; + } + + if (valueIsString) { + this.tokens.appendCode(`${enumName}[${nameStringCode}] = 
${valueReference};`); + } else { + this.tokens.appendCode( + `${enumName}[${enumName}[${nameStringCode}] = ${valueReference}] = ${nameStringCode};`, + ); + } + lastValueReference = valueReference; + isPreviousValidIdentifier = isValidIdentifier; + } + } +} diff --git a/node_modules/sucrase/dist/util/elideImportEquals.d.ts b/node_modules/sucrase/dist/util/elideImportEquals.d.ts new file mode 100644 index 00000000..92af704b --- /dev/null +++ b/node_modules/sucrase/dist/util/elideImportEquals.d.ts @@ -0,0 +1,2 @@ +import TokenProcessor from "../TokenProcessor"; +export default function elideImportEquals(tokens: TokenProcessor): void; diff --git a/node_modules/sucrase/dist/util/elideImportEquals.js b/node_modules/sucrase/dist/util/elideImportEquals.js new file mode 100644 index 00000000..b5ba225a --- /dev/null +++ b/node_modules/sucrase/dist/util/elideImportEquals.js @@ -0,0 +1,29 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _types = require('../parser/tokenizer/types'); + + + function elideImportEquals(tokens) { + // import + tokens.removeInitialToken(); + // name + tokens.removeToken(); + // = + tokens.removeToken(); + // name or require + tokens.removeToken(); + // Handle either `import A = require('A')` or `import A = B.C.D`. + if (tokens.matches1(_types.TokenType.parenL)) { + // ( + tokens.removeToken(); + // path string + tokens.removeToken(); + // ) + tokens.removeToken(); + } else { + while (tokens.matches1(_types.TokenType.dot)) { + // . + tokens.removeToken(); + // name + tokens.removeToken(); + } + } +} exports.default = elideImportEquals; diff --git a/node_modules/sucrase/dist/util/elideImportEquals.mjs b/node_modules/sucrase/dist/util/elideImportEquals.mjs new file mode 100644 index 00000000..6b18a7af --- /dev/null +++ b/node_modules/sucrase/dist/util/elideImportEquals.mjs @@ -0,0 +1,29 @@ +import {TokenType as tt} from "../parser/tokenizer/types"; + + +export default function elideImportEquals(tokens) { + // import + tokens.removeInitialToken(); + // name + tokens.removeToken(); + // = + tokens.removeToken(); + // name or require + tokens.removeToken(); + // Handle either `import A = require('A')` or `import A = B.C.D`. + if (tokens.matches1(tt.parenL)) { + // ( + tokens.removeToken(); + // path string + tokens.removeToken(); + // ) + tokens.removeToken(); + } else { + while (tokens.matches1(tt.dot)) { + // . + tokens.removeToken(); + // name + tokens.removeToken(); + } + } +} diff --git a/node_modules/sucrase/dist/util/formatTokens.d.ts b/node_modules/sucrase/dist/util/formatTokens.d.ts new file mode 100644 index 00000000..b392cd73 --- /dev/null +++ b/node_modules/sucrase/dist/util/formatTokens.d.ts @@ -0,0 +1,2 @@ +import { Token } from "../parser/tokenizer"; +export default function formatTokens(code: string, tokens: Array): string; diff --git a/node_modules/sucrase/dist/util/formatTokens.js b/node_modules/sucrase/dist/util/formatTokens.js new file mode 100644 index 00000000..363aa706 --- /dev/null +++ b/node_modules/sucrase/dist/util/formatTokens.js @@ -0,0 +1,71 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _linesandcolumns = require('lines-and-columns'); var _linesandcolumns2 = _interopRequireDefault(_linesandcolumns); + +var _types = require('../parser/tokenizer/types'); + + function formatTokens(code, tokens) { + if (tokens.length === 0) { + return ""; + } + + const tokenKeys = Object.keys(tokens[0]).filter( + (k) => k !== "type" && k !== "value" && k !== "start" && k !== "end" && k !== "loc", + ); + const typeKeys = Object.keys(tokens[0].type).filter((k) => k !== "label" && k !== "keyword"); + + const headings = ["Location", "Label", "Raw", ...tokenKeys, ...typeKeys]; + + const lines = new (0, _linesandcolumns2.default)(code); + const rows = [headings, ...tokens.map(getTokenComponents)]; + const padding = headings.map(() => 0); + for (const components of rows) { + for (let i = 0; i < components.length; i++) { + padding[i] = Math.max(padding[i], components[i].length); + } + } + return rows + .map((components) => components.map((component, i) => component.padEnd(padding[i])).join(" ")) + .join("\n"); + + function getTokenComponents(token) { + const raw = code.slice(token.start, token.end); + return [ + formatRange(token.start, token.end), + _types.formatTokenType.call(void 0, token.type), + truncate(String(raw), 14), + ...tokenKeys.map((key) => formatValue(token[key], key)), + ...typeKeys.map((key) => formatValue(token.type[key], key)), + ]; + } + + // tslint:disable-next-line no-any + function formatValue(value, key) { + if (value === true) { + return key; + } else if (value === false || value === null) { + return ""; + } else { + return String(value); + } + } + + function formatRange(start, end) { + return `${formatPos(start)}-${formatPos(end)}`; + } + + function formatPos(pos) { + const location = lines.locationForIndex(pos); + if (!location) { + return "Unknown"; + } else { + return `${location.line + 1}:${location.column + 1}`; + } + } +} exports.default = formatTokens; + +function truncate(s, length) { + if (s.length > length) { + return `${s.slice(0, length - 3)}...`; + } else { + return s; + } +} diff --git a/node_modules/sucrase/dist/util/formatTokens.mjs b/node_modules/sucrase/dist/util/formatTokens.mjs new file mode 100644 index 00000000..d138c53f --- /dev/null +++ b/node_modules/sucrase/dist/util/formatTokens.mjs @@ -0,0 +1,71 @@ +import LinesAndColumns from "lines-and-columns"; + +import {formatTokenType} from "../parser/tokenizer/types"; + +export default function formatTokens(code, tokens) { + if (tokens.length === 0) { + return ""; + } + + const tokenKeys = Object.keys(tokens[0]).filter( + (k) => k !== "type" && k !== "value" && k !== "start" && k !== "end" && k !== "loc", + ); + const typeKeys = Object.keys(tokens[0].type).filter((k) => k !== "label" && k !== "keyword"); + + const headings = ["Location", "Label", "Raw", ...tokenKeys, ...typeKeys]; + + const lines = new LinesAndColumns(code); + const rows = [headings, ...tokens.map(getTokenComponents)]; + const padding = headings.map(() => 0); + for (const components of rows) { + for (let i = 0; i < components.length; i++) { + padding[i] = Math.max(padding[i], components[i].length); + } + } + return rows + .map((components) => components.map((component, i) => component.padEnd(padding[i])).join(" ")) + .join("\n"); + + function getTokenComponents(token) { + const raw = code.slice(token.start, token.end); + return [ + formatRange(token.start, token.end), + formatTokenType(token.type), + truncate(String(raw), 14), + ...tokenKeys.map((key) 
=> formatValue(token[key], key)),
+ ...typeKeys.map((key) => formatValue(token.type[key], key)),
+ ];
+ }
+
+ // tslint:disable-next-line no-any
+ function formatValue(value, key) {
+ if (value === true) {
+ return key;
+ } else if (value === false || value === null) {
+ return "";
+ } else {
+ return String(value);
+ }
+ }
+
+ function formatRange(start, end) {
+ return `${formatPos(start)}-${formatPos(end)}`;
+ }
+
+ function formatPos(pos) {
+ const location = lines.locationForIndex(pos);
+ if (!location) {
+ return "Unknown";
+ } else {
+ return `${location.line + 1}:${location.column + 1}`;
+ }
+ }
+}
+
+function truncate(s, length) {
+ if (s.length > length) {
+ return `${s.slice(0, length - 3)}...`;
+ } else {
+ return s;
+ }
+}
diff --git a/node_modules/sucrase/dist/util/getClassInfo.d.ts b/node_modules/sucrase/dist/util/getClassInfo.d.ts
new file mode 100644
index 00000000..e20646e5
--- /dev/null
+++ b/node_modules/sucrase/dist/util/getClassInfo.d.ts
@@ -0,0 +1,34 @@
+import NameManager from "../NameManager";
+import TokenProcessor from "../TokenProcessor";
+import RootTransformer from "../transformers/RootTransformer";
+export interface ClassHeaderInfo {
+ isExpression: boolean;
+ className: string | null;
+ hasSuperclass: boolean;
+}
+export interface TokenRange {
+ start: number;
+ end: number;
+}
+export interface FieldInfo extends TokenRange {
+ equalsIndex: number;
+ initializerName: string;
+}
+/**
+ * Information about a class returned to inform the implementation of class fields and constructor
+ * initializers.
+ */
+export interface ClassInfo {
+ headerInfo: ClassHeaderInfo;
+ constructorInitializerStatements: Array<string>;
+ instanceInitializerNames: Array<string>;
+ staticInitializerNames: Array<string>;
+ constructorInsertPos: number | null;
+ fields: Array<FieldInfo>;
+ rangesToRemove: Array<TokenRange>;
+}
+/**
+ * Get information about the class fields for this class, given a token processor pointing to the
+ * open-brace at the start of the class.
+ */
+export default function getClassInfo(rootTransformer: RootTransformer, tokens: TokenProcessor, nameManager: NameManager): ClassInfo;
diff --git a/node_modules/sucrase/dist/util/getClassInfo.js b/node_modules/sucrase/dist/util/getClassInfo.js
new file mode 100644
index 00000000..0b351f74
--- /dev/null
+++ b/node_modules/sucrase/dist/util/getClassInfo.js
@@ -0,0 +1,281 @@
+"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});
+
+var _keywords = require('../parser/tokenizer/keywords');
+var _types = require('../parser/tokenizer/types');
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+/**
+ * Get information about the class fields for this class, given a token processor pointing to the
+ * open-brace at the start of the class.
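As a hypothetical example of the information collected (added for illustration, not part of the declaration file): for a class like

    class Counter extends Base {
      count = 0;
      constructor(public name: string) { super(); }
    }

`fields` would be expected to contain one entry covering `count = 0`, `constructorInitializerStatements` would contain `this.name = name`, `headerInfo.hasSuperclass` would be true, and `constructorInsertPos` would point just past the `super()` call. RootTransformer then rewrites the field into an initializer method roughly of the form `__init() {this.count = 0}` and invokes it from the constructor via `Counter.prototype.__init.call(this)`.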
+ */ + function getClassInfo( + rootTransformer, + tokens, + nameManager, +) { + const snapshot = tokens.snapshot(); + + const headerInfo = processClassHeader(tokens); + + let constructorInitializerStatements = []; + const instanceInitializerNames = []; + const staticInitializerNames = []; + let constructorInsertPos = null; + const fields = []; + const rangesToRemove = []; + + const classContextId = tokens.currentToken().contextId; + if (classContextId == null) { + throw new Error("Expected non-null class context ID on class open-brace."); + } + + tokens.nextToken(); + while (!tokens.matchesContextIdAndLabel(_types.TokenType.braceR, classContextId)) { + if (tokens.matchesContextual(_keywords.ContextualKeyword._constructor) && !tokens.currentToken().isType) { + ({constructorInitializerStatements, constructorInsertPos} = processConstructor(tokens)); + } else if (tokens.matches1(_types.TokenType.semi)) { + rangesToRemove.push({start: tokens.currentIndex(), end: tokens.currentIndex() + 1}); + tokens.nextToken(); + } else if (tokens.currentToken().isType) { + tokens.nextToken(); + } else { + // Either a method or a field. Skip to the identifier part. + const statementStartIndex = tokens.currentIndex(); + let isStatic = false; + while (isAccessModifier(tokens.currentToken())) { + if (tokens.matches1(_types.TokenType._static)) { + isStatic = true; + } + tokens.nextToken(); + } + if ( + tokens.matchesContextual(_keywords.ContextualKeyword._constructor) && + !tokens.currentToken().isType + ) { + ({constructorInitializerStatements, constructorInsertPos} = processConstructor(tokens)); + continue; + } + const nameStartIndex = tokens.currentIndex(); + skipFieldName(tokens); + if (tokens.matches1(_types.TokenType.lessThan) || tokens.matches1(_types.TokenType.parenL)) { + // This is a method, so just skip to the next method/field. To do that, we seek forward to + // the next start of a class name (either an open bracket or an identifier, or the closing + // curly brace), then seek backward to include any access modifiers. + while (tokens.currentToken().contextId !== classContextId) { + tokens.nextToken(); + } + while (isAccessModifier(tokens.tokenAtRelativeIndex(-1))) { + tokens.previousToken(); + } + continue; + } + // There might be a type annotation that we need to skip. + while (tokens.currentToken().isType) { + tokens.nextToken(); + } + if (tokens.matches1(_types.TokenType.eq)) { + const equalsIndex = tokens.currentIndex(); + // This is an initializer, so we need to wrap in an initializer method. + const valueEnd = tokens.currentToken().rhsEndIndex; + if (valueEnd == null) { + throw new Error("Expected rhsEndIndex on class field assignment."); + } + tokens.nextToken(); + while (tokens.currentIndex() < valueEnd) { + rootTransformer.processToken(); + } + let initializerName; + if (isStatic) { + initializerName = nameManager.claimFreeName("__initStatic"); + staticInitializerNames.push(initializerName); + } else { + initializerName = nameManager.claimFreeName("__init"); + instanceInitializerNames.push(initializerName); + } + // Fields start at the name, so `static x = 1;` has a field range of `x = 1;`. + fields.push({ + initializerName, + equalsIndex, + start: nameStartIndex, + end: tokens.currentIndex(), + }); + } else { + // This is just a declaration, so doesn't need to produce any code in the output. 
+ rangesToRemove.push({start: statementStartIndex, end: tokens.currentIndex()}); + } + } + } + + tokens.restoreToSnapshot(snapshot); + return { + headerInfo, + constructorInitializerStatements, + instanceInitializerNames, + staticInitializerNames, + constructorInsertPos, + fields, + rangesToRemove, + }; +} exports.default = getClassInfo; + +function processClassHeader(tokens) { + const classToken = tokens.currentToken(); + const contextId = classToken.contextId; + if (contextId == null) { + throw new Error("Expected context ID on class token."); + } + const isExpression = classToken.isExpression; + if (isExpression == null) { + throw new Error("Expected isExpression on class token."); + } + let className = null; + let hasSuperclass = false; + tokens.nextToken(); + if (tokens.matches1(_types.TokenType.name)) { + className = tokens.identifierName(); + } + while (!tokens.matchesContextIdAndLabel(_types.TokenType.braceL, contextId)) { + // If this has a superclass, there will always be an `extends` token. If it doesn't have a + // superclass, only type parameters and `implements` clauses can show up here, all of which + // consist only of type tokens. A declaration like `class A {` should *not* count + // as having a superclass. + if (tokens.matches1(_types.TokenType._extends) && !tokens.currentToken().isType) { + hasSuperclass = true; + } + tokens.nextToken(); + } + return {isExpression, className, hasSuperclass}; +} + +/** + * Extract useful information out of a constructor, starting at the "constructor" name. + */ +function processConstructor( + tokens, +) { + const constructorInitializerStatements = []; + + tokens.nextToken(); + const constructorContextId = tokens.currentToken().contextId; + if (constructorContextId == null) { + throw new Error("Expected context ID on open-paren starting constructor params."); + } + tokens.nextToken(); + // Advance through parameters looking for access modifiers. + while (!tokens.matchesContextIdAndLabel(_types.TokenType.parenR, constructorContextId)) { + if (isAccessModifier(tokens.currentToken())) { + tokens.nextToken(); + while (isAccessModifier(tokens.currentToken())) { + tokens.nextToken(); + } + const token = tokens.currentToken(); + if (token.type !== _types.TokenType.name) { + throw new Error("Expected identifier after access modifiers in constructor arg."); + } + const name = tokens.identifierNameForToken(token); + constructorInitializerStatements.push(`this.${name} = ${name}`); + } + tokens.nextToken(); + } + // ) + tokens.nextToken(); + let constructorInsertPos = tokens.currentIndex(); + + // Advance through body looking for a super call. + let foundSuperCall = false; + while (!tokens.matchesContextIdAndLabel(_types.TokenType.braceR, constructorContextId)) { + if (!foundSuperCall && tokens.matches2(_types.TokenType._super, _types.TokenType.parenL)) { + tokens.nextToken(); + const superCallContextId = tokens.currentToken().contextId; + if (superCallContextId == null) { + throw new Error("Expected a context ID on the super call"); + } + while (!tokens.matchesContextIdAndLabel(_types.TokenType.parenR, superCallContextId)) { + tokens.nextToken(); + } + constructorInsertPos = tokens.currentIndex(); + foundSuperCall = true; + } + tokens.nextToken(); + } + // } + tokens.nextToken(); + + return {constructorInitializerStatements, constructorInsertPos}; +} + +/** + * Determine if this is any token that can go before the name in a method/field. 
+ */ +function isAccessModifier(token) { + return [ + _types.TokenType._async, + _types.TokenType._get, + _types.TokenType._set, + _types.TokenType.plus, + _types.TokenType.minus, + _types.TokenType._readonly, + _types.TokenType._static, + _types.TokenType._public, + _types.TokenType._private, + _types.TokenType._protected, + _types.TokenType._abstract, + ].includes(token.type); +} + +/** + * The next token or set of tokens is either an identifier or an expression in square brackets, for + * a method or field name. + */ +function skipFieldName(tokens) { + if (tokens.matches1(_types.TokenType.bracketL)) { + const startToken = tokens.currentToken(); + const classContextId = startToken.contextId; + if (classContextId == null) { + throw new Error("Expected class context ID on computed name open bracket."); + } + while (!tokens.matchesContextIdAndLabel(_types.TokenType.bracketR, classContextId)) { + tokens.nextToken(); + } + tokens.nextToken(); + } else { + tokens.nextToken(); + } +} diff --git a/node_modules/sucrase/dist/util/getClassInfo.mjs b/node_modules/sucrase/dist/util/getClassInfo.mjs new file mode 100644 index 00000000..9ec0bd80 --- /dev/null +++ b/node_modules/sucrase/dist/util/getClassInfo.mjs @@ -0,0 +1,281 @@ + + +import {ContextualKeyword} from "../parser/tokenizer/keywords"; +import {TokenType as tt} from "../parser/tokenizer/types"; + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +/** + * Get information about the class fields for this class, given a token processor pointing to the + * open-brace at the start of the class. + */ +export default function getClassInfo( + rootTransformer, + tokens, + nameManager, +) { + const snapshot = tokens.snapshot(); + + const headerInfo = processClassHeader(tokens); + + let constructorInitializerStatements = []; + const instanceInitializerNames = []; + const staticInitializerNames = []; + let constructorInsertPos = null; + const fields = []; + const rangesToRemove = []; + + const classContextId = tokens.currentToken().contextId; + if (classContextId == null) { + throw new Error("Expected non-null class context ID on class open-brace."); + } + + tokens.nextToken(); + while (!tokens.matchesContextIdAndLabel(tt.braceR, classContextId)) { + if (tokens.matchesContextual(ContextualKeyword._constructor) && !tokens.currentToken().isType) { + ({constructorInitializerStatements, constructorInsertPos} = processConstructor(tokens)); + } else if (tokens.matches1(tt.semi)) { + rangesToRemove.push({start: tokens.currentIndex(), end: tokens.currentIndex() + 1}); + tokens.nextToken(); + } else if (tokens.currentToken().isType) { + tokens.nextToken(); + } else { + // Either a method or a field. Skip to the identifier part. + const statementStartIndex = tokens.currentIndex(); + let isStatic = false; + while (isAccessModifier(tokens.currentToken())) { + if (tokens.matches1(tt._static)) { + isStatic = true; + } + tokens.nextToken(); + } + if ( + tokens.matchesContextual(ContextualKeyword._constructor) && + !tokens.currentToken().isType + ) { + ({constructorInitializerStatements, constructorInsertPos} = processConstructor(tokens)); + continue; + } + const nameStartIndex = tokens.currentIndex(); + skipFieldName(tokens); + if (tokens.matches1(tt.lessThan) || tokens.matches1(tt.parenL)) { + // This is a method, so just skip to the next method/field. 
To do that, we seek forward to + // the next start of a class name (either an open bracket or an identifier, or the closing + // curly brace), then seek backward to include any access modifiers. + while (tokens.currentToken().contextId !== classContextId) { + tokens.nextToken(); + } + while (isAccessModifier(tokens.tokenAtRelativeIndex(-1))) { + tokens.previousToken(); + } + continue; + } + // There might be a type annotation that we need to skip. + while (tokens.currentToken().isType) { + tokens.nextToken(); + } + if (tokens.matches1(tt.eq)) { + const equalsIndex = tokens.currentIndex(); + // This is an initializer, so we need to wrap in an initializer method. + const valueEnd = tokens.currentToken().rhsEndIndex; + if (valueEnd == null) { + throw new Error("Expected rhsEndIndex on class field assignment."); + } + tokens.nextToken(); + while (tokens.currentIndex() < valueEnd) { + rootTransformer.processToken(); + } + let initializerName; + if (isStatic) { + initializerName = nameManager.claimFreeName("__initStatic"); + staticInitializerNames.push(initializerName); + } else { + initializerName = nameManager.claimFreeName("__init"); + instanceInitializerNames.push(initializerName); + } + // Fields start at the name, so `static x = 1;` has a field range of `x = 1;`. + fields.push({ + initializerName, + equalsIndex, + start: nameStartIndex, + end: tokens.currentIndex(), + }); + } else { + // This is just a declaration, so doesn't need to produce any code in the output. + rangesToRemove.push({start: statementStartIndex, end: tokens.currentIndex()}); + } + } + } + + tokens.restoreToSnapshot(snapshot); + return { + headerInfo, + constructorInitializerStatements, + instanceInitializerNames, + staticInitializerNames, + constructorInsertPos, + fields, + rangesToRemove, + }; +} + +function processClassHeader(tokens) { + const classToken = tokens.currentToken(); + const contextId = classToken.contextId; + if (contextId == null) { + throw new Error("Expected context ID on class token."); + } + const isExpression = classToken.isExpression; + if (isExpression == null) { + throw new Error("Expected isExpression on class token."); + } + let className = null; + let hasSuperclass = false; + tokens.nextToken(); + if (tokens.matches1(tt.name)) { + className = tokens.identifierName(); + } + while (!tokens.matchesContextIdAndLabel(tt.braceL, contextId)) { + // If this has a superclass, there will always be an `extends` token. If it doesn't have a + // superclass, only type parameters and `implements` clauses can show up here, all of which + // consist only of type tokens. A declaration like `class A {` should *not* count + // as having a superclass. + if (tokens.matches1(tt._extends) && !tokens.currentToken().isType) { + hasSuperclass = true; + } + tokens.nextToken(); + } + return {isExpression, className, hasSuperclass}; +} + +/** + * Extract useful information out of a constructor, starting at the "constructor" name. + */ +function processConstructor( + tokens, +) { + const constructorInitializerStatements = []; + + tokens.nextToken(); + const constructorContextId = tokens.currentToken().contextId; + if (constructorContextId == null) { + throw new Error("Expected context ID on open-paren starting constructor params."); + } + tokens.nextToken(); + // Advance through parameters looking for access modifiers. 
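A concrete (editorial) example of what this loop collects: given a constructor parameter with modifiers, such as

    constructor(private readonly name: string) {}

the loop records the statement `this.name = name`, and the constructor is later rewritten to roughly

    constructor(name) { this.name = name; }

with the modifier and type tokens removed by the TypeScript transformer.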
+ while (!tokens.matchesContextIdAndLabel(tt.parenR, constructorContextId)) {
+ if (isAccessModifier(tokens.currentToken())) {
+ tokens.nextToken();
+ while (isAccessModifier(tokens.currentToken())) {
+ tokens.nextToken();
+ }
+ const token = tokens.currentToken();
+ if (token.type !== tt.name) {
+ throw new Error("Expected identifier after access modifiers in constructor arg.");
+ }
+ const name = tokens.identifierNameForToken(token);
+ constructorInitializerStatements.push(`this.${name} = ${name}`);
+ }
+ tokens.nextToken();
+ }
+ // )
+ tokens.nextToken();
+ let constructorInsertPos = tokens.currentIndex();
+
+ // Advance through body looking for a super call.
+ let foundSuperCall = false;
+ while (!tokens.matchesContextIdAndLabel(tt.braceR, constructorContextId)) {
+ if (!foundSuperCall && tokens.matches2(tt._super, tt.parenL)) {
+ tokens.nextToken();
+ const superCallContextId = tokens.currentToken().contextId;
+ if (superCallContextId == null) {
+ throw new Error("Expected a context ID on the super call");
+ }
+ while (!tokens.matchesContextIdAndLabel(tt.parenR, superCallContextId)) {
+ tokens.nextToken();
+ }
+ constructorInsertPos = tokens.currentIndex();
+ foundSuperCall = true;
+ }
+ tokens.nextToken();
+ }
+ // }
+ tokens.nextToken();
+
+ return {constructorInitializerStatements, constructorInsertPos};
+}
+
+/**
+ * Determine if this is any token that can go before the name in a method/field.
+ */
+function isAccessModifier(token) {
+ return [
+ tt._async,
+ tt._get,
+ tt._set,
+ tt.plus,
+ tt.minus,
+ tt._readonly,
+ tt._static,
+ tt._public,
+ tt._private,
+ tt._protected,
+ tt._abstract,
+ ].includes(token.type);
+}
+
+/**
+ * The next token or set of tokens is either an identifier or an expression in square brackets, for
+ * a method or field name.
+ */
+function skipFieldName(tokens) {
+ if (tokens.matches1(tt.bracketL)) {
+ const startToken = tokens.currentToken();
+ const classContextId = startToken.contextId;
+ if (classContextId == null) {
+ throw new Error("Expected class context ID on computed name open bracket.");
+ }
+ while (!tokens.matchesContextIdAndLabel(tt.bracketR, classContextId)) {
+ tokens.nextToken();
+ }
+ tokens.nextToken();
+ } else {
+ tokens.nextToken();
+ }
+}
diff --git a/node_modules/sucrase/dist/util/getDeclarationInfo.d.ts b/node_modules/sucrase/dist/util/getDeclarationInfo.d.ts
new file mode 100644
index 00000000..a41795d6
--- /dev/null
+++ b/node_modules/sucrase/dist/util/getDeclarationInfo.d.ts
@@ -0,0 +1,18 @@
+import TokenProcessor from "../TokenProcessor";
+export interface DeclarationInfo {
+ typeDeclarations: Set<string>;
+ valueDeclarations: Set<string>;
+}
+export declare const EMPTY_DECLARATION_INFO: DeclarationInfo;
+/**
+ * Get all top-level identifiers that should be preserved when exported in TypeScript.
+ *
+ * Examples:
+ * - If an identifier is declared as `const x`, then `export {x}` should be preserved.
+ * - If it's declared as `type x`, then `export {x}` should be removed.
+ * - If it's declared as both `const x` and `type x`, then the export should be preserved.
+ * - Classes and enums should be preserved (even though they also introduce types).
+ * - Imported identifiers should be preserved since we don't have enough information to
+ * rule them out. --isolatedModules disallows re-exports, which catches errors here.
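A hypothetical example of the resulting sets (added for illustration): for a module whose top level contains

    const a = 1;
    type B = number;
    class C {}

`valueDeclarations` would be expected to hold "a" and "C" while `typeDeclarations` holds "B", so a later `export {B}` can be pruned and `export {a, C}` kept.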
+ */ +export default function getDeclarationInfo(tokens: TokenProcessor): DeclarationInfo; diff --git a/node_modules/sucrase/dist/util/getDeclarationInfo.js b/node_modules/sucrase/dist/util/getDeclarationInfo.js new file mode 100644 index 00000000..be853c13 --- /dev/null +++ b/node_modules/sucrase/dist/util/getDeclarationInfo.js @@ -0,0 +1,40 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _tokenizer = require('../parser/tokenizer'); +var _types = require('../parser/tokenizer/types'); + + + + + + + + const EMPTY_DECLARATION_INFO = { + typeDeclarations: new Set(), + valueDeclarations: new Set(), +}; exports.EMPTY_DECLARATION_INFO = EMPTY_DECLARATION_INFO; + +/** + * Get all top-level identifiers that should be preserved when exported in TypeScript. + * + * Examples: + * - If an identifier is declared as `const x`, then `export {x}` should be preserved. + * - If it's declared as `type x`, then `export {x}` should be removed. + * - If it's declared as both `const x` and `type x`, then the export should be preserved. + * - Classes and enums should be preserved (even though they also introduce types). + * - Imported identifiers should be preserved since we don't have enough information to + * rule them out. --isolatedModules disallows re-exports, which catches errors here. + */ + function getDeclarationInfo(tokens) { + const typeDeclarations = new Set(); + const valueDeclarations = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + const token = tokens.tokens[i]; + if (token.type === _types.TokenType.name && _tokenizer.isTopLevelDeclaration.call(void 0, token)) { + if (token.isType) { + typeDeclarations.add(tokens.identifierNameForToken(token)); + } else { + valueDeclarations.add(tokens.identifierNameForToken(token)); + } + } + } + return {typeDeclarations, valueDeclarations}; +} exports.default = getDeclarationInfo; diff --git a/node_modules/sucrase/dist/util/getDeclarationInfo.mjs b/node_modules/sucrase/dist/util/getDeclarationInfo.mjs new file mode 100644 index 00000000..ade9a817 --- /dev/null +++ b/node_modules/sucrase/dist/util/getDeclarationInfo.mjs @@ -0,0 +1,40 @@ +import {isTopLevelDeclaration} from "../parser/tokenizer"; +import {TokenType as tt} from "../parser/tokenizer/types"; + + + + + + + +export const EMPTY_DECLARATION_INFO = { + typeDeclarations: new Set(), + valueDeclarations: new Set(), +}; + +/** + * Get all top-level identifiers that should be preserved when exported in TypeScript. + * + * Examples: + * - If an identifier is declared as `const x`, then `export {x}` should be preserved. + * - If it's declared as `type x`, then `export {x}` should be removed. + * - If it's declared as both `const x` and `type x`, then the export should be preserved. + * - Classes and enums should be preserved (even though they also introduce types). + * - Imported identifiers should be preserved since we don't have enough information to + * rule them out. --isolatedModules disallows re-exports, which catches errors here. 
+ */ +export default function getDeclarationInfo(tokens) { + const typeDeclarations = new Set(); + const valueDeclarations = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + const token = tokens.tokens[i]; + if (token.type === tt.name && isTopLevelDeclaration(token)) { + if (token.isType) { + typeDeclarations.add(tokens.identifierNameForToken(token)); + } else { + valueDeclarations.add(tokens.identifierNameForToken(token)); + } + } + } + return {typeDeclarations, valueDeclarations}; +} diff --git a/node_modules/sucrase/dist/util/getJSXPragmaInfo.d.ts b/node_modules/sucrase/dist/util/getJSXPragmaInfo.d.ts new file mode 100644 index 00000000..708317a9 --- /dev/null +++ b/node_modules/sucrase/dist/util/getJSXPragmaInfo.d.ts @@ -0,0 +1,8 @@ +import { Options } from "../index"; +export interface JSXPragmaInfo { + base: string; + suffix: string; + fragmentBase: string; + fragmentSuffix: string; +} +export default function getJSXPragmaInfo(options: Options): JSXPragmaInfo; diff --git a/node_modules/sucrase/dist/util/getJSXPragmaInfo.js b/node_modules/sucrase/dist/util/getJSXPragmaInfo.js new file mode 100644 index 00000000..67513db0 --- /dev/null +++ b/node_modules/sucrase/dist/util/getJSXPragmaInfo.js @@ -0,0 +1,22 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + + + + + + + + + function getJSXPragmaInfo(options) { + const [base, suffix] = splitPragma(options.jsxPragma || "React.createElement"); + const [fragmentBase, fragmentSuffix] = splitPragma(options.jsxFragmentPragma || "React.Fragment"); + return {base, suffix, fragmentBase, fragmentSuffix}; +} exports.default = getJSXPragmaInfo; + +function splitPragma(pragma) { + let dotIndex = pragma.indexOf("."); + if (dotIndex === -1) { + dotIndex = pragma.length; + } + return [pragma.slice(0, dotIndex), pragma.slice(dotIndex)]; +} diff --git a/node_modules/sucrase/dist/util/getJSXPragmaInfo.mjs b/node_modules/sucrase/dist/util/getJSXPragmaInfo.mjs new file mode 100644 index 00000000..9972342e --- /dev/null +++ b/node_modules/sucrase/dist/util/getJSXPragmaInfo.mjs @@ -0,0 +1,22 @@ + + + + + + + + + +export default function getJSXPragmaInfo(options) { + const [base, suffix] = splitPragma(options.jsxPragma || "React.createElement"); + const [fragmentBase, fragmentSuffix] = splitPragma(options.jsxFragmentPragma || "React.Fragment"); + return {base, suffix, fragmentBase, fragmentSuffix}; +} + +function splitPragma(pragma) { + let dotIndex = pragma.indexOf("."); + if (dotIndex === -1) { + dotIndex = pragma.length; + } + return [pragma.slice(0, dotIndex), pragma.slice(dotIndex)]; +} diff --git a/node_modules/sucrase/dist/util/getNonTypeIdentifiers.d.ts b/node_modules/sucrase/dist/util/getNonTypeIdentifiers.d.ts new file mode 100644 index 00000000..f60fdb39 --- /dev/null +++ b/node_modules/sucrase/dist/util/getNonTypeIdentifiers.d.ts @@ -0,0 +1,3 @@ +import { Options } from "../index"; +import TokenProcessor from "../TokenProcessor"; +export declare function getNonTypeIdentifiers(tokens: TokenProcessor, options: Options): Set; diff --git a/node_modules/sucrase/dist/util/getNonTypeIdentifiers.js b/node_modules/sucrase/dist/util/getNonTypeIdentifiers.js new file mode 100644 index 00000000..11ac6241 --- /dev/null +++ b/node_modules/sucrase/dist/util/getNonTypeIdentifiers.js @@ -0,0 +1,43 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true}); +var _tokenizer = require('../parser/tokenizer'); +var _types = require('../parser/tokenizer/types'); + +var _JSXTransformer = require('../transformers/JSXTransformer'); +var _getJSXPragmaInfo = require('./getJSXPragmaInfo'); var _getJSXPragmaInfo2 = _interopRequireDefault(_getJSXPragmaInfo); + + function getNonTypeIdentifiers(tokens, options) { + const jsxPragmaInfo = _getJSXPragmaInfo2.default.call(void 0, options); + const nonTypeIdentifiers = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + const token = tokens.tokens[i]; + if ( + token.type === _types.TokenType.name && + !token.isType && + (token.identifierRole === _tokenizer.IdentifierRole.Access || + token.identifierRole === _tokenizer.IdentifierRole.ObjectShorthand || + token.identifierRole === _tokenizer.IdentifierRole.ExportAccess) && + !token.shadowsGlobal + ) { + nonTypeIdentifiers.add(tokens.identifierNameForToken(token)); + } + if (token.type === _types.TokenType.jsxTagStart) { + nonTypeIdentifiers.add(jsxPragmaInfo.base); + } + if ( + token.type === _types.TokenType.jsxTagStart && + i + 1 < tokens.tokens.length && + tokens.tokens[i + 1].type === _types.TokenType.jsxTagEnd + ) { + nonTypeIdentifiers.add(jsxPragmaInfo.base); + nonTypeIdentifiers.add(jsxPragmaInfo.fragmentBase); + } + if (token.type === _types.TokenType.jsxName && token.identifierRole === _tokenizer.IdentifierRole.Access) { + const identifierName = tokens.identifierNameForToken(token); + // Lower-case single-component tag names like "div" don't count. + if (!_JSXTransformer.startsWithLowerCase.call(void 0, identifierName) || tokens.tokens[i + 1].type === _types.TokenType.dot) { + nonTypeIdentifiers.add(tokens.identifierNameForToken(token)); + } + } + } + return nonTypeIdentifiers; +} exports.getNonTypeIdentifiers = getNonTypeIdentifiers; diff --git a/node_modules/sucrase/dist/util/getNonTypeIdentifiers.mjs b/node_modules/sucrase/dist/util/getNonTypeIdentifiers.mjs new file mode 100644 index 00000000..24c73ddd --- /dev/null +++ b/node_modules/sucrase/dist/util/getNonTypeIdentifiers.mjs @@ -0,0 +1,43 @@ + +import {IdentifierRole} from "../parser/tokenizer"; +import {TokenType, TokenType as tt} from "../parser/tokenizer/types"; + +import {startsWithLowerCase} from "../transformers/JSXTransformer"; +import getJSXPragmaInfo from "./getJSXPragmaInfo"; + +export function getNonTypeIdentifiers(tokens, options) { + const jsxPragmaInfo = getJSXPragmaInfo(options); + const nonTypeIdentifiers = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + const token = tokens.tokens[i]; + if ( + token.type === tt.name && + !token.isType && + (token.identifierRole === IdentifierRole.Access || + token.identifierRole === IdentifierRole.ObjectShorthand || + token.identifierRole === IdentifierRole.ExportAccess) && + !token.shadowsGlobal + ) { + nonTypeIdentifiers.add(tokens.identifierNameForToken(token)); + } + if (token.type === tt.jsxTagStart) { + nonTypeIdentifiers.add(jsxPragmaInfo.base); + } + if ( + token.type === tt.jsxTagStart && + i + 1 < tokens.tokens.length && + tokens.tokens[i + 1].type === tt.jsxTagEnd + ) { + nonTypeIdentifiers.add(jsxPragmaInfo.base); + nonTypeIdentifiers.add(jsxPragmaInfo.fragmentBase); + } + if (token.type === tt.jsxName && token.identifierRole === IdentifierRole.Access) { + const identifierName = tokens.identifierNameForToken(token); + // Lower-case single-component tag names like "div" don't count. 
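An editorial example of this rule (not text from the file):

    <div />      // intrinsic element, nothing recorded
    <Button />   // records "Button"
    <foo.bar />  // records "foo"

Lower-case single-part tag names refer to intrinsic elements rather than imported components, so only capitalized or dotted names are treated as value usages.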
+ if (!startsWithLowerCase(identifierName) || tokens.tokens[i + 1].type === TokenType.dot) { + nonTypeIdentifiers.add(tokens.identifierNameForToken(token)); + } + } + } + return nonTypeIdentifiers; +} diff --git a/node_modules/sucrase/dist/util/getTSImportedNames.d.ts b/node_modules/sucrase/dist/util/getTSImportedNames.d.ts new file mode 100644 index 00000000..8884b121 --- /dev/null +++ b/node_modules/sucrase/dist/util/getTSImportedNames.d.ts @@ -0,0 +1,9 @@ +import TokenProcessor from "../TokenProcessor"; +/** + * Special case code to scan for imported names in ESM TypeScript. We need to do this so we can + * properly get globals so we can compute shadowed globals. + * + * This is similar to logic in CJSImportProcessor, but trimmed down to avoid logic with CJS + * replacement and flow type imports. + */ +export default function getTSImportedNames(tokens: TokenProcessor): Set; diff --git a/node_modules/sucrase/dist/util/getTSImportedNames.js b/node_modules/sucrase/dist/util/getTSImportedNames.js new file mode 100644 index 00000000..06488cf6 --- /dev/null +++ b/node_modules/sucrase/dist/util/getTSImportedNames.js @@ -0,0 +1,88 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _keywords = require('../parser/tokenizer/keywords'); +var _types = require('../parser/tokenizer/types'); + + +/** + * Special case code to scan for imported names in ESM TypeScript. We need to do this so we can + * properly get globals so we can compute shadowed globals. + * + * This is similar to logic in CJSImportProcessor, but trimmed down to avoid logic with CJS + * replacement and flow type imports. + */ + function getTSImportedNames(tokens) { + const importedNames = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + if ( + tokens.matches1AtIndex(i, _types.TokenType._import) && + !tokens.matches3AtIndex(i, _types.TokenType._import, _types.TokenType.name, _types.TokenType.eq) + ) { + collectNamesForImport(tokens, i, importedNames); + } + } + return importedNames; +} exports.default = getTSImportedNames; + +function collectNamesForImport( + tokens, + index, + importedNames, +) { + index++; + + if (tokens.matches1AtIndex(index, _types.TokenType.parenL)) { + // Dynamic import, so nothing to do + return; + } + + if (tokens.matches1AtIndex(index, _types.TokenType.name)) { + importedNames.add(tokens.identifierNameAtIndex(index)); + index++; + if (tokens.matches1AtIndex(index, _types.TokenType.comma)) { + index++; + } + } + + if (tokens.matches1AtIndex(index, _types.TokenType.star)) { + // * as + index += 2; + importedNames.add(tokens.identifierNameAtIndex(index)); + index++; + } + + if (tokens.matches1AtIndex(index, _types.TokenType.braceL)) { + index++; + collectNamesForNamedImport(tokens, index, importedNames); + } +} + +function collectNamesForNamedImport( + tokens, + index, + importedNames, +) { + while (true) { + if (tokens.matches1AtIndex(index, _types.TokenType.braceR)) { + return; + } + + // We care about the local name, which might be the first token, or if there's an "as", is the + // one after that. 
+ let name = tokens.identifierNameAtIndex(index); + index++; + if (tokens.matchesContextualAtIndex(index, _keywords.ContextualKeyword._as)) { + index++; + name = tokens.identifierNameAtIndex(index); + index++; + } + importedNames.add(name); + if (tokens.matches2AtIndex(index, _types.TokenType.comma, _types.TokenType.braceR)) { + return; + } else if (tokens.matches1AtIndex(index, _types.TokenType.braceR)) { + return; + } else if (tokens.matches1AtIndex(index, _types.TokenType.comma)) { + index++; + } else { + throw new Error(`Unexpected token: ${JSON.stringify(tokens.tokens[index])}`); + } + } +} diff --git a/node_modules/sucrase/dist/util/getTSImportedNames.mjs b/node_modules/sucrase/dist/util/getTSImportedNames.mjs new file mode 100644 index 00000000..29b797cc --- /dev/null +++ b/node_modules/sucrase/dist/util/getTSImportedNames.mjs @@ -0,0 +1,88 @@ +import {ContextualKeyword} from "../parser/tokenizer/keywords"; +import {TokenType as tt} from "../parser/tokenizer/types"; + + +/** + * Special case code to scan for imported names in ESM TypeScript. We need to do this so we can + * properly get globals so we can compute shadowed globals. + * + * This is similar to logic in CJSImportProcessor, but trimmed down to avoid logic with CJS + * replacement and flow type imports. + */ +export default function getTSImportedNames(tokens) { + const importedNames = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + if ( + tokens.matches1AtIndex(i, tt._import) && + !tokens.matches3AtIndex(i, tt._import, tt.name, tt.eq) + ) { + collectNamesForImport(tokens, i, importedNames); + } + } + return importedNames; +} + +function collectNamesForImport( + tokens, + index, + importedNames, +) { + index++; + + if (tokens.matches1AtIndex(index, tt.parenL)) { + // Dynamic import, so nothing to do + return; + } + + if (tokens.matches1AtIndex(index, tt.name)) { + importedNames.add(tokens.identifierNameAtIndex(index)); + index++; + if (tokens.matches1AtIndex(index, tt.comma)) { + index++; + } + } + + if (tokens.matches1AtIndex(index, tt.star)) { + // * as + index += 2; + importedNames.add(tokens.identifierNameAtIndex(index)); + index++; + } + + if (tokens.matches1AtIndex(index, tt.braceL)) { + index++; + collectNamesForNamedImport(tokens, index, importedNames); + } +} + +function collectNamesForNamedImport( + tokens, + index, + importedNames, +) { + while (true) { + if (tokens.matches1AtIndex(index, tt.braceR)) { + return; + } + + // We care about the local name, which might be the first token, or if there's an "as", is the + // one after that. 
+ let name = tokens.identifierNameAtIndex(index); + index++; + if (tokens.matchesContextualAtIndex(index, ContextualKeyword._as)) { + index++; + name = tokens.identifierNameAtIndex(index); + index++; + } + importedNames.add(name); + if (tokens.matches2AtIndex(index, tt.comma, tt.braceR)) { + return; + } else if (tokens.matches1AtIndex(index, tt.braceR)) { + return; + } else if (tokens.matches1AtIndex(index, tt.comma)) { + index++; + } else { + throw new Error(`Unexpected token: ${JSON.stringify(tokens.tokens[index])}`); + } + } +} diff --git a/node_modules/sucrase/dist/util/isIdentifier.d.ts b/node_modules/sucrase/dist/util/isIdentifier.d.ts new file mode 100644 index 00000000..960e6fe7 --- /dev/null +++ b/node_modules/sucrase/dist/util/isIdentifier.d.ts @@ -0,0 +1 @@ +export default function isIdentifier(name: string): boolean; diff --git a/node_modules/sucrase/dist/util/isIdentifier.js b/node_modules/sucrase/dist/util/isIdentifier.js new file mode 100644 index 00000000..3d242f1f --- /dev/null +++ b/node_modules/sucrase/dist/util/isIdentifier.js @@ -0,0 +1,70 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _identifier = require('../parser/util/identifier'); + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar +// Hard-code a list of reserved words rather than trying to use keywords or contextual keywords +// from the parser, since currently there are various exceptions, like `package` being reserved +// but unused and various contextual keywords being reserved. Note that we assume that all code +// compiled by Sucrase is in a module, so strict mode words and await are all considered reserved +// here. +const RESERVED_WORDS = new Set([ + // Reserved keywords as of ECMAScript 2015 + "break", + "case", + "catch", + "class", + "const", + "continue", + "debugger", + "default", + "delete", + "do", + "else", + "export", + "extends", + "finally", + "for", + "function", + "if", + "import", + "in", + "instanceof", + "new", + "return", + "super", + "switch", + "this", + "throw", + "try", + "typeof", + "var", + "void", + "while", + "with", + "yield", + // Future reserved keywords + "enum", + "implements", + "interface", + "let", + "package", + "private", + "protected", + "public", + "static", + "await", +]); + + function isIdentifier(name) { + if (name.length === 0) { + return false; + } + if (!_identifier.IS_IDENTIFIER_START[name.charCodeAt(0)]) { + return false; + } + for (let i = 1; i < name.length; i++) { + if (!_identifier.IS_IDENTIFIER_CHAR[name.charCodeAt(i)]) { + return false; + } + } + return !RESERVED_WORDS.has(name); +} exports.default = isIdentifier; diff --git a/node_modules/sucrase/dist/util/isIdentifier.mjs b/node_modules/sucrase/dist/util/isIdentifier.mjs new file mode 100644 index 00000000..77306636 --- /dev/null +++ b/node_modules/sucrase/dist/util/isIdentifier.mjs @@ -0,0 +1,70 @@ +import {IS_IDENTIFIER_CHAR, IS_IDENTIFIER_START} from "../parser/util/identifier"; + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar +// Hard-code a list of reserved words rather than trying to use keywords or contextual keywords +// from the parser, since currently there are various exceptions, like `package` being reserved +// but unused and various contextual keywords being reserved. Note that we assume that all code +// compiled by Sucrase is in a module, so strict mode words and await are all considered reserved +// here. 
+const RESERVED_WORDS = new Set([ + // Reserved keywords as of ECMAScript 2015 + "break", + "case", + "catch", + "class", + "const", + "continue", + "debugger", + "default", + "delete", + "do", + "else", + "export", + "extends", + "finally", + "for", + "function", + "if", + "import", + "in", + "instanceof", + "new", + "return", + "super", + "switch", + "this", + "throw", + "try", + "typeof", + "var", + "void", + "while", + "with", + "yield", + // Future reserved keywords + "enum", + "implements", + "interface", + "let", + "package", + "private", + "protected", + "public", + "static", + "await", +]); + +export default function isIdentifier(name) { + if (name.length === 0) { + return false; + } + if (!IS_IDENTIFIER_START[name.charCodeAt(0)]) { + return false; + } + for (let i = 1; i < name.length; i++) { + if (!IS_IDENTIFIER_CHAR[name.charCodeAt(i)]) { + return false; + } + } + return !RESERVED_WORDS.has(name); +} diff --git a/node_modules/sucrase/dist/util/shouldElideDefaultExport.d.ts b/node_modules/sucrase/dist/util/shouldElideDefaultExport.d.ts new file mode 100644 index 00000000..dab722fb --- /dev/null +++ b/node_modules/sucrase/dist/util/shouldElideDefaultExport.d.ts @@ -0,0 +1,6 @@ +import TokenProcessor from "../TokenProcessor"; +import { DeclarationInfo } from "./getDeclarationInfo"; +/** + * Common method sharing code between CJS and ESM cases, since they're the same here. + */ +export default function shouldElideDefaultExport(isTypeScriptTransformEnabled: boolean, tokens: TokenProcessor, declarationInfo: DeclarationInfo): boolean; diff --git a/node_modules/sucrase/dist/util/shouldElideDefaultExport.js b/node_modules/sucrase/dist/util/shouldElideDefaultExport.js new file mode 100644 index 00000000..efa86550 --- /dev/null +++ b/node_modules/sucrase/dist/util/shouldElideDefaultExport.js @@ -0,0 +1,37 @@ +"use strict"; function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }Object.defineProperty(exports, "__esModule", {value: true});var _types = require('../parser/tokenizer/types'); + + + +/** + * Common method sharing code between CJS and ESM cases, since they're the same here. + */ + function shouldElideDefaultExport( + isTypeScriptTransformEnabled, + tokens, + declarationInfo, +) { + if (!isTypeScriptTransformEnabled) { + return false; + } + const exportToken = tokens.currentToken(); + if (exportToken.rhsEndIndex == null) { + throw new Error("Expected non-null rhsEndIndex on export token."); + } + // The export must be of the form `export default a` or `export default a;`. 
+ const numTokens = exportToken.rhsEndIndex - tokens.currentIndex(); + if ( + numTokens !== 3 && + !(numTokens === 4 && tokens.matches1AtIndex(exportToken.rhsEndIndex - 1, _types.TokenType.semi)) + ) { + return false; + } + const identifierToken = tokens.tokenAtRelativeIndex(2); + if (identifierToken.type !== _types.TokenType.name) { + return false; + } + const exportedName = tokens.identifierNameForToken(identifierToken); + return ( + declarationInfo.typeDeclarations.has(exportedName) && + !declarationInfo.valueDeclarations.has(exportedName) + ); +} exports.default = shouldElideDefaultExport; diff --git a/node_modules/sucrase/dist/util/shouldElideDefaultExport.mjs b/node_modules/sucrase/dist/util/shouldElideDefaultExport.mjs new file mode 100644 index 00000000..5eaa6f0a --- /dev/null +++ b/node_modules/sucrase/dist/util/shouldElideDefaultExport.mjs @@ -0,0 +1,37 @@ +import {TokenType as tt} from "../parser/tokenizer/types"; + + + +/** + * Common method sharing code between CJS and ESM cases, since they're the same here. + */ +export default function shouldElideDefaultExport( + isTypeScriptTransformEnabled, + tokens, + declarationInfo, +) { + if (!isTypeScriptTransformEnabled) { + return false; + } + const exportToken = tokens.currentToken(); + if (exportToken.rhsEndIndex == null) { + throw new Error("Expected non-null rhsEndIndex on export token."); + } + // The export must be of the form `export default a` or `export default a;`. + const numTokens = exportToken.rhsEndIndex - tokens.currentIndex(); + if ( + numTokens !== 3 && + !(numTokens === 4 && tokens.matches1AtIndex(exportToken.rhsEndIndex - 1, tt.semi)) + ) { + return false; + } + const identifierToken = tokens.tokenAtRelativeIndex(2); + if (identifierToken.type !== tt.name) { + return false; + } + const exportedName = tokens.identifierNameForToken(identifierToken); + return ( + declarationInfo.typeDeclarations.has(exportedName) && + !declarationInfo.valueDeclarations.has(exportedName) + ); +} diff --git a/node_modules/sucrase/package.json b/node_modules/sucrase/package.json new file mode 100644 index 00000000..a1ff4f33 --- /dev/null +++ b/node_modules/sucrase/package.json @@ -0,0 +1,89 @@ +{ + "name": "sucrase", + "version": "3.10.1", + "description": "Super-fast alternative to Babel for when you can target modern JS runtimes", + "author": "Alan Pierce ", + "license": "MIT", + "main": "dist/index", + "module": "dist/index.mjs", + "types": "dist/index.d.ts", + "bin": { + "sucrase": "./bin/sucrase", + "sucrase-node": "./bin/sucrase-node" + }, + "scripts": { + "build": "sucrase-node script/build.ts", + "fast-build": "sucrase-node script/build.ts --fast", + "clean": "rm -rf ./build ./dist ./dist-self-build ./dist-types ./example-runner/example-repos", + "generate": "sucrase-node generator/generate.ts", + "benchmark": "sucrase-node benchmark/benchmark.ts", + "microbenchmark": "sucrase-node benchmark/microbenchmark.ts", + "benchmark-react": "sucrase-node benchmark/benchmark-react.ts", + "benchmark-project": "sucrase-node benchmark/benchmark-project.ts", + "lint": "sucrase-node script/lint.ts", + "profile": "node --inspect-brk ./node_modules/.bin/sucrase-node ./benchmark/profile", + "profile-project": "node --inspect-brk ./node_modules/.bin/sucrase-node ./benchmark/benchmark-project.ts --profile", + "prepublishOnly": "yarn clean && yarn build", + "release": "sucrase-node script/release.ts", + "run-examples": "sucrase-node example-runner/example-runner.ts", + "test": "yarn lint && yarn test-only", + "test-only": "mocha './test/**/*.ts'", + 
"test-with-coverage": "nyc mocha './test/**/*.ts'", + "report-coverage": "nyc report --reporter=text-lcov > coverage.lcov && codecov" + }, + "repository": { + "type": "git", + "url": "https://github.com/alangpierce/sucrase.git" + }, + "keywords": [ + "babel", + "jsx", + "typescript", + "flow" + ], + "bugs": { + "url": "https://github.com/alangpierce/sucrase/issues" + }, + "devDependencies": { + "@babel/cli": "^7.1.5", + "@babel/core": "^7.1.6", + "@babel/plugin-proposal-class-properties": "^7.1.0", + "@babel/plugin-proposal-object-rest-spread": "^7.0.0", + "@babel/plugin-transform-modules-commonjs": "^7.1.0", + "@babel/preset-flow": "^7.0.0", + "@babel/preset-react": "^7.0.0", + "@babel/preset-typescript": "^7.1.0", + "@types/mocha": "^5.2.5", + "@types/mz": "^0.0.32", + "@types/node": "^10.12.10", + "@types/yargs-parser": "^11.0.0", + "codecov": "^3.1.0", + "eslint": "^5.11.1", + "eslint-config-airbnb-base": "^13.1.0", + "eslint-config-prettier": "^3.3.0", + "eslint-plugin-import": "^2.14.0", + "eslint-plugin-prettier": "^3.0.1", + "eslint-plugin-typescript": "^0.14.0", + "mocha": "^5.2.0", + "nyc": "^13.1.0", + "prettier": "^1.15.2", + "sucrase": "^3.10.0", + "tslint": "^5.9.1", + "typescript": "^3.2.2", + "typescript-eslint-parser": "^21.0.2", + "typescript-tslint-plugin": "^0.1.2", + "yargs-parser": "^11.1.1" + }, + "dependencies": { + "commander": "^2.19.0", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.0" + }, + "engines": { + "node": ">=8" + }, + "resolutions": { + "typescript-eslint-parser/typescript-estree": "8.1.0" + } +} diff --git a/node_modules/sucrase/register/index.js b/node_modules/sucrase/register/index.js new file mode 100644 index 00000000..f6eb814b --- /dev/null +++ b/node_modules/sucrase/register/index.js @@ -0,0 +1 @@ +require("../dist/register").registerAll(); diff --git a/node_modules/sucrase/register/js.js b/node_modules/sucrase/register/js.js new file mode 100644 index 00000000..48918960 --- /dev/null +++ b/node_modules/sucrase/register/js.js @@ -0,0 +1 @@ +require("../dist/register").registerJS(); diff --git a/node_modules/sucrase/register/jsx.js b/node_modules/sucrase/register/jsx.js new file mode 100644 index 00000000..4bd476ea --- /dev/null +++ b/node_modules/sucrase/register/jsx.js @@ -0,0 +1 @@ +require("../dist/register").registerJSX(); diff --git a/node_modules/sucrase/register/ts-legacy-module-interop.js b/node_modules/sucrase/register/ts-legacy-module-interop.js new file mode 100644 index 00000000..1ec5a6d7 --- /dev/null +++ b/node_modules/sucrase/register/ts-legacy-module-interop.js @@ -0,0 +1 @@ +require("../dist/register").registerTSLegacyModuleInterop(); diff --git a/node_modules/sucrase/register/ts.js b/node_modules/sucrase/register/ts.js new file mode 100644 index 00000000..23b8c885 --- /dev/null +++ b/node_modules/sucrase/register/ts.js @@ -0,0 +1 @@ +require("../dist/register").registerTS(); diff --git a/node_modules/sucrase/register/tsx-legacy-module-interop.js b/node_modules/sucrase/register/tsx-legacy-module-interop.js new file mode 100644 index 00000000..a883680d --- /dev/null +++ b/node_modules/sucrase/register/tsx-legacy-module-interop.js @@ -0,0 +1 @@ +require("../dist/register").registerTSXLegacyModuleInterop(); diff --git a/node_modules/sucrase/register/tsx.js b/node_modules/sucrase/register/tsx.js new file mode 100644 index 00000000..deb8b34e --- /dev/null +++ b/node_modules/sucrase/register/tsx.js @@ -0,0 +1 @@ +require("../dist/register").registerTSX(); diff --git a/node_modules/supports-color/browser.js 
b/node_modules/supports-color/browser.js new file mode 100644 index 00000000..62afa3a7 --- /dev/null +++ b/node_modules/supports-color/browser.js @@ -0,0 +1,5 @@ +'use strict'; +module.exports = { + stdout: false, + stderr: false +}; diff --git a/node_modules/supports-color/index.js b/node_modules/supports-color/index.js new file mode 100644 index 00000000..1704131b --- /dev/null +++ b/node_modules/supports-color/index.js @@ -0,0 +1,131 @@ +'use strict'; +const os = require('os'); +const hasFlag = require('has-flag'); + +const env = process.env; + +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false')) { + forceColor = false; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = true; +} +if ('FORCE_COLOR' in env) { + forceColor = env.FORCE_COLOR.length === 0 || parseInt(env.FORCE_COLOR, 10) !== 0; +} + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} + +function supportsColor(stream) { + if (forceColor === false) { + return 0; + } + + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (stream && !stream.isTTY && forceColor !== true) { + return 0; + } + + const min = forceColor ? 1 : 0; + + if (process.platform === 'win32') { + // Node.js 7.5.0 is the first version of Node.js to include a patch to + // libuv that enables 256 color output on Windows. Anything earlier and it + // won't work. However, here we target Node.js 8 at minimum as it is an LTS + // release, and Node.js 7 is not. Windows 10 build 10586 is the first Windows + // release that supports 256 colors. Windows 10 build 14931 is the first release + // that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(process.versions.node.split('.')[0]) >= 8 && + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + + if (env.COLORTERM === 'truecolor') { + return 3; + } + + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 
3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + if (env.TERM === 'dumb') { + return min; + } + + return min; +} + +function getSupportLevel(stream) { + const level = supportsColor(stream); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: getSupportLevel(process.stdout), + stderr: getSupportLevel(process.stderr) +}; diff --git a/node_modules/supports-color/license b/node_modules/supports-color/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/supports-color/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/supports-color/package.json b/node_modules/supports-color/package.json new file mode 100644 index 00000000..ad199f5c --- /dev/null +++ b/node_modules/supports-color/package.json @@ -0,0 +1,53 @@ +{ + "name": "supports-color", + "version": "5.5.0", + "description": "Detect whether a terminal supports color", + "license": "MIT", + "repository": "chalk/supports-color", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js", + "browser.js" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "ansi", + "styles", + "tty", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "support", + "supports", + "capability", + "detect", + "truecolor", + "16m" + ], + "dependencies": { + "has-flag": "^3.0.0" + }, + "devDependencies": { + "ava": "^0.25.0", + "import-fresh": "^2.0.0", + "xo": "^0.20.0" + }, + "browser": "browser.js" +} diff --git a/node_modules/supports-color/readme.md b/node_modules/supports-color/readme.md new file mode 100644 index 00000000..f6e40195 --- /dev/null +++ b/node_modules/supports-color/readme.md @@ -0,0 +1,66 @@ +# supports-color [![Build Status](https://travis-ci.org/chalk/supports-color.svg?branch=master)](https://travis-ci.org/chalk/supports-color) + +> Detect whether a terminal supports color + + +## Install + +``` +$ npm install supports-color +``` + + +## Usage + +```js +const supportsColor = require('supports-color'); + +if (supportsColor.stdout) { + console.log('Terminal stdout supports color'); +} + +if (supportsColor.stdout.has256) { + console.log('Terminal stdout supports 256 colors'); +} + +if (supportsColor.stderr.has16m) { + console.log('Terminal stderr supports 16 million colors (truecolor)'); +} +``` + + +## API + +Returns an `Object` with a `stdout` and `stderr` property for testing either streams. Each property is an `Object`, or `false` if color is not supported. + +The `stdout`/`stderr` objects specifies a level of support for color through a `.level` property and a corresponding flag: + +- `.level = 1` and `.hasBasic = true`: Basic color support (16 colors) +- `.level = 2` and `.has256 = true`: 256 color support +- `.level = 3` and `.has16m = true`: Truecolor support (16 million colors) + + +## Info + +It obeys the `--color` and `--no-color` CLI flags. + +Can be overridden by the user with the flags `--color` and `--no-color`. For situations where using `--color` is not possible, add the environment variable `FORCE_COLOR=1` to forcefully enable color or `FORCE_COLOR=0` to forcefully disable. The use of `FORCE_COLOR` overrides all other color support checks. + +Explicit 256/Truecolor mode can be enabled using the `--color=256` and `--color=16m` flags, respectively. + + +## Related + +- [supports-color-cli](https://github.com/chalk/supports-color-cli) - CLI for this module +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/node_modules/tar/LICENSE b/node_modules/tar/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/tar/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tar/README.md b/node_modules/tar/README.md new file mode 100644 index 00000000..034e4865 --- /dev/null +++ b/node_modules/tar/README.md @@ -0,0 +1,954 @@ +# node-tar + +[![Build Status](https://travis-ci.org/npm/node-tar.svg?branch=master)](https://travis-ci.org/npm/node-tar) + +[Fast](./benchmarks) and full-featured Tar for Node.js + +The API is designed to mimic the behavior of `tar(1)` on unix systems. +If you are familiar with how tar works, most of this will hopefully be +straightforward for you. If not, then hopefully this module can teach +you useful unix skills that may come in handy someday :) + +## Background + +A "tar file" or "tarball" is an archive of file system entries +(directories, files, links, etc.) The name comes from "tape archive". +If you run `man tar` on almost any Unix command line, you'll learn +quite a bit about what it can do, and its history. + +Tar has 5 main top-level commands: + +* `c` Create an archive +* `r` Replace entries within an archive +* `u` Update entries within an archive (ie, replace if they're newer) +* `t` List out the contents of an archive +* `x` Extract an archive to disk + +The other flags and options modify how this top level function works. + +## High-Level API + +These 5 functions are the high-level API. All of them have a +single-character name (for unix nerds familiar with `tar(1)`) as well +as a long name (for everyone else). + +All the high-level functions take the following arguments, all three +of which are optional and may be omitted. + +1. `options` - An optional object specifying various options +2. `paths` - An array of paths to add or extract +3. `callback` - Called when the command is completed, if async. (If + sync or no file specified, providing a callback throws a + `TypeError`.) + +If the command is sync (ie, if `options.sync=true`), then the +callback is not allowed, since the action will be completed immediately. + +If a `file` argument is specified, and the command is async, then a +`Promise` is returned. In this case, if async, a callback may be +provided which is called when the command is completed. + +If a `file` option is not specified, then a stream is returned. For +`create`, this is a readable stream of the generated archive. For +`list` and `extract` this is a writable stream that an archive should +be written into. If a file is not specified, then a callback is not +allowed, because you're already getting a stream to work with. + +`replace` and `update` only work on existing archives, and so require +a `file` argument. + +Sync commands without a file argument return a stream that acts on its +input immediately in the same tick. For readable streams, this means +that all of the data is immediately available by calling +`stream.read()`. 
For writable streams, it will be acted upon as soon +as it is provided, but this can be at any time. + +### Warnings + +Some things cause tar to emit a warning, but should usually not cause +the entire operation to fail. There are three ways to handle +warnings: + +1. **Ignore them** (default) Invalid entries won't be put in the + archive, and invalid entries won't be unpacked. This is usually + fine, but can hide failures that you might care about. +2. **Notice them** Add an `onwarn` function to the options, or listen + to the `'warn'` event on any tar stream. The function will get + called as `onwarn(message, data)`. Handle as appropriate. +3. **Explode them.** Set `strict: true` in the options object, and + `warn` messages will be emitted as `'error'` events instead. If + there's no `error` handler, this causes the program to crash. If + used with a promise-returning/callback-taking method, then it'll + send the error to the promise/callback. + +### Examples + +The API mimics the `tar(1)` command line functionality, with aliases +for more human-readable option and function names. The goal is that +if you know how to use `tar(1)` in Unix, then you know how to use +`require('tar')` in JavaScript. + +To replicate `tar czf my-tarball.tgz files and folders`, you'd do: + +```js +tar.c( + { + gzip: , + file: 'my-tarball.tgz' + }, + ['some', 'files', 'and', 'folders'] +).then(_ => { .. tarball has been created .. }) +``` + +To replicate `tar cz files and folders > my-tarball.tgz`, you'd do: + +```js +tar.c( // or tar.create + { + gzip: + }, + ['some', 'files', 'and', 'folders'] +).pipe(fs.createWriteStream('my-tarball.tgz')) +``` + +To replicate `tar xf my-tarball.tgz` you'd do: + +```js +tar.x( // or tar.extract( + { + file: 'my-tarball.tgz' + } +).then(_=> { .. tarball has been dumped in cwd .. }) +``` + +To replicate `cat my-tarball.tgz | tar x -C some-dir --strip=1`: + +```js +fs.createReadStream('my-tarball.tgz').pipe( + tar.x({ + strip: 1, + C: 'some-dir' // alias for cwd:'some-dir', also ok + }) +) +``` + +To replicate `tar tf my-tarball.tgz`, do this: + +```js +tar.t({ + file: 'my-tarball.tgz', + onentry: entry => { .. do whatever with it .. } +}) +``` + +To replicate `cat my-tarball.tgz | tar t` do: + +```js +fs.createReadStream('my-tarball.tgz') + .pipe(tar.t()) + .on('entry', entry => { .. do whatever with it .. }) +``` + +To do anything synchronous, add `sync: true` to the options. Note +that sync functions don't take a callback and don't return a promise. +When the function returns, it's already done. Sync methods without a +file argument return a sync stream, which flushes immediately. But, +of course, it still won't be done until you `.end()` it. + +To filter entries, add `filter: ` to the options. +Tar-creating methods call the filter with `filter(path, stat)`. +Tar-reading methods (including extraction) call the filter with +`filter(path, entry)`. The filter is called in the `this`-context of +the `Pack` or `Unpack` stream object. + +The arguments list to `tar t` and `tar x` specify a list of filenames +to extract or list, so they're equivalent to a filter that tests if +the file is in the list. + +For those who _aren't_ fans of tar's single-character command names: + +``` +tar.c === tar.create +tar.r === tar.replace (appends to archive, file is required) +tar.u === tar.update (appends if newer, file is required) +tar.x === tar.extract +tar.t === tar.list +``` + +Keep reading for all the command descriptions and options, as well as +the low-level API that they are built on. 
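+Putting the warning options described above together, here is a minimal
+sketch (the archive name and paths are placeholders):
+
+```js
+const tar = require('tar')
+
+// Option 2 above: notice warnings by passing an `onwarn` handler.
+tar.c(
+  {
+    file: 'my-tarball.tgz',
+    onwarn: (message, data) => console.error('tar warning:', message, data)
+  },
+  ['some', 'files', 'and', 'folders']
+).then(_ => { /* tarball has been created */ })
+
+// Option 3 above: `strict` turns warnings into errors, so the returned
+// promise rejects instead of silently skipping invalid entries.
+tar.x({ file: 'my-tarball.tgz', strict: true })
+  .catch(err => console.error('extraction failed:', err))
+```
+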
+ +### tar.c(options, fileList, callback) [alias: tar.create] + +Create a tarball archive. + +The `fileList` is an array of paths to add to the tarball. Adding a +directory also adds its children recursively. + +An entry in `fileList` that starts with an `@` symbol is a tar archive +whose entries will be added. To add a file that starts with `@`, +prepend it with `./`. + +The following options are supported: + +- `file` Write the tarball archive to the specified filename. If this + is specified, then the callback will be fired when the file has been + written, and a promise will be returned that resolves when the file + is written. If a filename is not specified, then a Readable Stream + will be returned which will emit the file data. [Alias: `f`] +- `sync` Act synchronously. If this is set, then any provided file + will be fully written after the call to `tar.c`. If this is set, + and a file is not provided, then the resulting stream will already + have the data ready to `read` or `emit('data')` as soon as you + request it. +- `onwarn` A function that will get called with `(message, data)` for + any warnings encountered. +- `strict` Treat warnings as crash-worthy errors. Default false. +- `cwd` The current working directory for creating the archive. + Defaults to `process.cwd()`. [Alias: `C`] +- `prefix` A path portion to prefix onto the entries in the archive. +- `gzip` Set to any truthy value to create a gzipped archive, or an + object with settings for `zlib.Gzip()` [Alias: `z`] +- `filter` A function that gets called with `(path, stat)` for each + entry being added. Return `true` to add the entry to the archive, + or `false` to omit it. +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary other + time-based operations. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. [Alias: `P`] +- `mode` The mode to set on the created file archive +- `noDirRecurse` Do not recursively archive the contents of + directories. [Alias: `n`] +- `follow` Set to true to pack the targets of symbolic links. Without + this option, symbolic links are archived as such. [Alias: `L`, `h`] +- `noPax` Suppress pax extended headers. Note that this means that + long paths and linkpaths will be truncated, and large or negative + numeric values may be interpreted incorrectly. +- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. + [Alias: `m`, `no-mtime`] +- `mtime` Set to a `Date` object to force a specific `mtime` for + everything added to the archive. Overridden by `noMtime`. + + +The following options are mostly internal, but can be modified in some +advanced use cases, such as re-using caches between runs. + +- `linkCache` A Map object containing the device and inode value for + any file whose nlink is > 1, to identify hard links. +- `statCache` A Map object that caches calls `lstat`. +- `readdirCache` A Map object that caches calls to `readdir`. +- `jobs` A number specifying how many concurrent jobs to run. + Defaults to 4. +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. + +### tar.x(options, fileList, callback) [alias: tar.extract] + +Extract a tarball archive. + +The `fileList` is an array of paths to extract from the tarball. 
If +no paths are provided, then all the entries are extracted. + +If the archive is gzipped, then tar will detect this and unzip it. + +Note that all directories that are created will be forced to be +writable, readable, and listable by their owner, to avoid cases where +a directory prevents extraction of child entries by virtue of its +mode. + +Most extraction errors will cause a `warn` event to be emitted. If +the `cwd` is missing, or not a directory, then the extraction will +fail completely. + +The following options are supported: + +- `cwd` Extract files relative to the specified directory. Defaults + to `process.cwd()`. If provided, this must exist and must be a + directory. [Alias: `C`] +- `file` The archive file to extract. If not specified, then a + Writable stream is returned where the archive data should be + written. [Alias: `f`] +- `sync` Create files and directories synchronously. +- `strict` Treat warnings as crash-worthy errors. Default false. +- `filter` A function that gets called with `(path, entry)` for each + entry being unpacked. Return `true` to unpack the entry from the + archive, or `false` to skip it. +- `newer` Set to true to keep the existing file on disk if it's newer + than the file in the archive. [Alias: `keep-newer`, + `keep-newer-files`] +- `keep` Do not overwrite existing files. In particular, if a file + appears more than once in an archive, later copies will not + overwrite earlier copies. [Alias: `k`, `keep-existing`] +- `preservePaths` Allow absolute paths, paths containing `..`, and + extracting through symbolic links. By default, `/` is stripped from + absolute paths, `..` paths are not extracted, and any file whose + location would be modified by a symbolic link is not extracted. + [Alias: `P`] +- `unlink` Unlink files before creating them. Without this option, + tar overwrites existing files, which preserves existing hardlinks. + With this option, existing hardlinks will be broken, as will any + symlink that would affect the location of an extracted file. [Alias: + `U`] +- `strip` Remove the specified number of leading path elements. + Pathnames with fewer elements will be silently skipped. Note that + the pathname is edited after applying the filter, but before + security checks. [Alias: `strip-components`, `stripComponents`] +- `onwarn` A function that will get called with `(message, data)` for + any warnings encountered. +- `preserveOwner` If true, tar will set the `uid` and `gid` of + extracted entries to the `uid` and `gid` fields in the archive. + This defaults to true when run as root, and false otherwise. If + false, then files and directories will be set with the owner and + group of the user running the process. This is similar to `-p` in + `tar(1)`, but ACLs and other system-specific data is never unpacked + in this implementation, and modes are set by default already. + [Alias: `p`] +- `uid` Set to a number to force ownership of all extracted files and + folders, and all implicitly created directories, to be owned by the + specified user id, regardless of the `uid` field in the archive. + Cannot be used along with `preserveOwner`. Requires also setting a + `gid` option. +- `gid` Set to a number to force ownership of all extracted files and + folders, and all implicitly created directories, to be owned by the + specified group id, regardless of the `gid` field in the archive. + Cannot be used along with `preserveOwner`. Requires also setting a + `uid` option. 
+- `noMtime` Set to true to omit writing `mtime` value for extracted + entries. [Alias: `m`, `no-mtime`] +- `transform` Provide a function that takes an `entry` object, and + returns a stream, or any falsey value. If a stream is provided, + then that stream's data will be written instead of the contents of + the archive entry. If a falsey value is provided, then the entry is + written to disk as normal. (To exclude items from extraction, use + the `filter` option described above.) +- `onentry` A function that gets called with `(entry)` for each entry + that passes the filter. + +The following options are mostly internal, but can be modified in some +advanced use cases, such as re-using caches between runs. + +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. +- `umask` Filter the modes of entries like `process.umask()`. +- `dmode` Default mode for directories +- `fmode` Default mode for files +- `dirCache` A Map object of which directories exist. +- `maxMetaEntrySize` The maximum size of meta entries that is + supported. Defaults to 1 MB. + +Note that using an asynchronous stream type with the `transform` +option will cause undefined behavior in sync extractions. +[MiniPass](http://npm.im/minipass)-based streams are designed for this +use case. + +### tar.t(options, fileList, callback) [alias: tar.list] + +List the contents of a tarball archive. + +The `fileList` is an array of paths to list from the tarball. If +no paths are provided, then all the entries are listed. + +If the archive is gzipped, then tar will detect this and unzip it. + +Returns an event emitter that emits `entry` events with +`tar.ReadEntry` objects. However, they don't emit `'data'` or `'end'` +events. (If you want to get actual readable entries, use the +`tar.Parse` class instead.) + +The following options are supported: + +- `cwd` Extract files relative to the specified directory. Defaults + to `process.cwd()`. [Alias: `C`] +- `file` The archive file to list. If not specified, then a + Writable stream is returned where the archive data should be + written. [Alias: `f`] +- `sync` Read the specified file synchronously. (This has no effect + when a file option isn't specified, because entries are emitted as + fast as they are parsed from the stream anyway.) +- `strict` Treat warnings as crash-worthy errors. Default false. +- `filter` A function that gets called with `(path, entry)` for each + entry being listed. Return `true` to emit the entry from the + archive, or `false` to skip it. +- `onentry` A function that gets called with `(entry)` for each entry + that passes the filter. This is important for when both `file` and + `sync` are set, because it will be called synchronously. +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. +- `noResume` By default, `entry` streams are resumed immediately after + the call to `onentry`. Set `noResume: true` to suppress this + behavior. Note that by opting into this, the stream will never + complete until the entry data is consumed. + +### tar.u(options, fileList, callback) [alias: tar.update] + +Add files to an archive if they are newer than the entry already in +the tarball archive. + +The `fileList` is an array of paths to add to the tarball. Adding a +directory also adds its children recursively. + +An entry in `fileList` that starts with an `@` symbol is a tar archive +whose entries will be added. To add a file that starts with `@`, +prepend it with `./`. 
+ +The following options are supported: + +- `file` Required. Write the tarball archive to the specified + filename. [Alias: `f`] +- `sync` Act synchronously. If this is set, then any provided file + will be fully written after the call to `tar.c`. +- `onwarn` A function that will get called with `(message, data)` for + any warnings encountered. +- `strict` Treat warnings as crash-worthy errors. Default false. +- `cwd` The current working directory for adding entries to the + archive. Defaults to `process.cwd()`. [Alias: `C`] +- `prefix` A path portion to prefix onto the entries in the archive. +- `gzip` Set to any truthy value to create a gzipped archive, or an + object with settings for `zlib.Gzip()` [Alias: `z`] +- `filter` A function that gets called with `(path, stat)` for each + entry being added. Return `true` to add the entry to the archive, + or `false` to omit it. +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary other + time-based operations. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. [Alias: `P`] +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. +- `noDirRecurse` Do not recursively archive the contents of + directories. [Alias: `n`] +- `follow` Set to true to pack the targets of symbolic links. Without + this option, symbolic links are archived as such. [Alias: `L`, `h`] +- `noPax` Suppress pax extended headers. Note that this means that + long paths and linkpaths will be truncated, and large or negative + numeric values may be interpreted incorrectly. +- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. + [Alias: `m`, `no-mtime`] +- `mtime` Set to a `Date` object to force a specific `mtime` for + everything added to the archive. Overridden by `noMtime`. + +### tar.r(options, fileList, callback) [alias: tar.replace] + +Add files to an existing archive. Because later entries override +earlier entries, this effectively replaces any existing entries. + +The `fileList` is an array of paths to add to the tarball. Adding a +directory also adds its children recursively. + +An entry in `fileList` that starts with an `@` symbol is a tar archive +whose entries will be added. To add a file that starts with `@`, +prepend it with `./`. + +The following options are supported: + +- `file` Required. Write the tarball archive to the specified + filename. [Alias: `f`] +- `sync` Act synchronously. If this is set, then any provided file + will be fully written after the call to `tar.c`. +- `onwarn` A function that will get called with `(message, data)` for + any warnings encountered. +- `strict` Treat warnings as crash-worthy errors. Default false. +- `cwd` The current working directory for adding entries to the + archive. Defaults to `process.cwd()`. [Alias: `C`] +- `prefix` A path portion to prefix onto the entries in the archive. +- `gzip` Set to any truthy value to create a gzipped archive, or an + object with settings for `zlib.Gzip()` [Alias: `z`] +- `filter` A function that gets called with `(path, stat)` for each + entry being added. Return `true` to add the entry to the archive, + or `false` to omit it. 
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary other + time-based operations. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. [Alias: `P`] +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. +- `noDirRecurse` Do not recursively archive the contents of + directories. [Alias: `n`] +- `follow` Set to true to pack the targets of symbolic links. Without + this option, symbolic links are archived as such. [Alias: `L`, `h`] +- `noPax` Suppress pax extended headers. Note that this means that + long paths and linkpaths will be truncated, and large or negative + numeric values may be interpreted incorrectly. +- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. + [Alias: `m`, `no-mtime`] +- `mtime` Set to a `Date` object to force a specific `mtime` for + everything added to the archive. Overridden by `noMtime`. + + +## Low-Level API + +### class tar.Pack + +A readable tar stream. + +Has all the standard readable stream interface stuff. `'data'` and +`'end'` events, `read()` method, `pause()` and `resume()`, etc. + +#### constructor(options) + +The following options are supported: + +- `onwarn` A function that will get called with `(message, data)` for + any warnings encountered. +- `strict` Treat warnings as crash-worthy errors. Default false. +- `cwd` The current working directory for creating the archive. + Defaults to `process.cwd()`. +- `prefix` A path portion to prefix onto the entries in the archive. +- `gzip` Set to any truthy value to create a gzipped archive, or an + object with settings for `zlib.Gzip()` +- `filter` A function that gets called with `(path, stat)` for each + entry being added. Return `true` to add the entry to the archive, + or `false` to omit it. +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary other + time-based operations. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. +- `linkCache` A Map object containing the device and inode value for + any file whose nlink is > 1, to identify hard links. +- `statCache` A Map object that caches calls `lstat`. +- `readdirCache` A Map object that caches calls to `readdir`. +- `jobs` A number specifying how many concurrent jobs to run. + Defaults to 4. +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. +- `noDirRecurse` Do not recursively archive the contents of + directories. +- `follow` Set to true to pack the targets of symbolic links. Without + this option, symbolic links are archived as such. +- `noPax` Suppress pax extended headers. Note that this means that + long paths and linkpaths will be truncated, and large or negative + numeric values may be interpreted incorrectly. +- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. +- `mtime` Set to a `Date` object to force a specific `mtime` for + everything added to the archive. Overridden by `noMtime`. 
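+
+As a quick sketch of how these constructor options come together (the output
+filename and entry paths below are illustrative):
+
+```js
+const fs = require('fs')
+const tar = require('tar')
+
+// Build an archive with the low-level readable Pack stream and write it out.
+const pack = new tar.Pack({ cwd: process.cwd(), gzip: true })
+pack.pipe(fs.createWriteStream('out.tgz'))
+pack.add('some-file.txt')
+pack.add('some-directory')
+pack.end()
+```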
+ +#### add(path) + +Adds an entry to the archive. Returns the Pack stream. + +#### write(path) + +Adds an entry to the archive. Returns true if flushed. + +#### end() + +Finishes the archive. + +### class tar.Pack.Sync + +Synchronous version of `tar.Pack`. + +### class tar.Unpack + +A writable stream that unpacks a tar archive onto the file system. + +All the normal writable stream stuff is supported. `write()` and +`end()` methods, `'drain'` events, etc. + +Note that all directories that are created will be forced to be +writable, readable, and listable by their owner, to avoid cases where +a directory prevents extraction of child entries by virtue of its +mode. + +`'close'` is emitted when it's done writing stuff to the file system. + +Most unpack errors will cause a `warn` event to be emitted. If the +`cwd` is missing, or not a directory, then an error will be emitted. + +#### constructor(options) + +- `cwd` Extract files relative to the specified directory. Defaults + to `process.cwd()`. If provided, this must exist and must be a + directory. +- `filter` A function that gets called with `(path, entry)` for each + entry being unpacked. Return `true` to unpack the entry from the + archive, or `false` to skip it. +- `newer` Set to true to keep the existing file on disk if it's newer + than the file in the archive. +- `keep` Do not overwrite existing files. In particular, if a file + appears more than once in an archive, later copies will not + overwrite earlier copies. +- `preservePaths` Allow absolute paths, paths containing `..`, and + extracting through symbolic links. By default, `/` is stripped from + absolute paths, `..` paths are not extracted, and any file whose + location would be modified by a symbolic link is not extracted. +- `unlink` Unlink files before creating them. Without this option, + tar overwrites existing files, which preserves existing hardlinks. + With this option, existing hardlinks will be broken, as will any + symlink that would affect the location of an extracted file. +- `strip` Remove the specified number of leading path elements. + Pathnames with fewer elements will be silently skipped. Note that + the pathname is edited after applying the filter, but before + security checks. +- `onwarn` A function that will get called with `(message, data)` for + any warnings encountered. +- `umask` Filter the modes of entries like `process.umask()`. +- `dmode` Default mode for directories +- `fmode` Default mode for files +- `dirCache` A Map object of which directories exist. +- `maxMetaEntrySize` The maximum size of meta entries that is + supported. Defaults to 1 MB. +- `preserveOwner` If true, tar will set the `uid` and `gid` of + extracted entries to the `uid` and `gid` fields in the archive. + This defaults to true when run as root, and false otherwise. If + false, then files and directories will be set with the owner and + group of the user running the process. This is similar to `-p` in + `tar(1)`, but ACLs and other system-specific data is never unpacked + in this implementation, and modes are set by default already. +- `win32` True if on a windows platform. Causes behavior where + filenames containing `<|>?` chars are converted to + windows-compatible values while being unpacked. +- `uid` Set to a number to force ownership of all extracted files and + folders, and all implicitly created directories, to be owned by the + specified user id, regardless of the `uid` field in the archive. + Cannot be used along with `preserveOwner`. 
Requires also setting a + `gid` option. +- `gid` Set to a number to force ownership of all extracted files and + folders, and all implicitly created directories, to be owned by the + specified group id, regardless of the `gid` field in the archive. + Cannot be used along with `preserveOwner`. Requires also setting a + `uid` option. +- `noMtime` Set to true to omit writing `mtime` value for extracted + entries. +- `transform` Provide a function that takes an `entry` object, and + returns a stream, or any falsey value. If a stream is provided, + then that stream's data will be written instead of the contents of + the archive entry. If a falsey value is provided, then the entry is + written to disk as normal. (To exclude items from extraction, use + the `filter` option described above.) +- `strict` Treat warnings as crash-worthy errors. Default false. +- `onentry` A function that gets called with `(entry)` for each entry + that passes the filter. +- `onwarn` A function that will get called with `(message, data)` for + any warnings encountered. + +### class tar.Unpack.Sync + +Synchronous version of `tar.Unpack`. + +Note that using an asynchronous stream type with the `transform` +option will cause undefined behavior in sync unpack streams. +[MiniPass](http://npm.im/minipass)-based streams are designed for this +use case. + +### class tar.Parse + +A writable stream that parses a tar archive stream. All the standard +writable stream stuff is supported. + +If the archive is gzipped, then tar will detect this and unzip it. + +Emits `'entry'` events with `tar.ReadEntry` objects, which are +themselves readable streams that you can pipe wherever. + +Each `entry` will not emit until the one before it is flushed through, +so make sure to either consume the data (with `on('data', ...)` or +`.pipe(...)`) or throw it away with `.resume()` to keep the stream +flowing. + +#### constructor(options) + +Returns an event emitter that emits `entry` events with +`tar.ReadEntry` objects. + +The following options are supported: + +- `strict` Treat warnings as crash-worthy errors. Default false. +- `filter` A function that gets called with `(path, entry)` for each + entry being listed. Return `true` to emit the entry from the + archive, or `false` to skip it. +- `onentry` A function that gets called with `(entry)` for each entry + that passes the filter. +- `onwarn` A function that will get called with `(message, data)` for + any warnings encountered. + +#### abort(message, error) + +Stop all parsing activities. This is called when there are zlib +errors. It also emits a warning with the message and error provided. + +### class tar.ReadEntry extends [MiniPass](http://npm.im/minipass) + +A representation of an entry that is being read out of a tar archive. + +It has the following fields: + +- `extended` The extended metadata object provided to the constructor. +- `globalExtended` The global extended metadata object provided to the + constructor. +- `remain` The number of bytes remaining to be written into the + stream. +- `blockRemain` The number of 512-byte blocks remaining to be written + into the stream. +- `ignore` Whether this entry should be ignored. +- `meta` True if this represents metadata about the next entry, false + if it represents a filesystem object. +- All the fields from the header, extended header, and global extended + header are added to the ReadEntry object. So it has `path`, `type`, + `size, `mode`, and so on. 
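+
+A minimal sketch of consuming these entries through `tar.Parse` (the archive
+name is a placeholder); each entry must be consumed or resumed so the parser
+can move on to the next one:
+
+```js
+const fs = require('fs')
+const tar = require('tar')
+
+fs.createReadStream('my-archive.tgz')
+  .pipe(new tar.Parse())
+  .on('entry', entry => {
+    console.log(entry.path, entry.type, entry.size)
+    entry.resume() // discard the body so the following entry can emit
+  })
+```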
+ +#### constructor(header, extended, globalExtended) + +Create a new ReadEntry object with the specified header, extended +header, and global extended header values. + +### class tar.WriteEntry extends [MiniPass](http://npm.im/minipass) + +A representation of an entry that is being written from the file +system into a tar archive. + +Emits data for the Header, and for the Pax Extended Header if one is +required, as well as any body data. + +Creating a WriteEntry for a directory does not also create +WriteEntry objects for all of the directory contents. + +It has the following fields: + +- `path` The path field that will be written to the archive. By + default, this is also the path from the cwd to the file system + object. +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary other + time-based operations. +- `myuid` If supported, the uid of the user running the current + process. +- `myuser` The `env.USER` string if set, or `''`. Set as the entry + `uname` field if the file's `uid` matches `this.myuid`. +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 1 MB. +- `linkCache` A Map object containing the device and inode value for + any file whose nlink is > 1, to identify hard links. +- `statCache` A Map object that caches calls `lstat`. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. +- `cwd` The current working directory for creating the archive. + Defaults to `process.cwd()`. +- `absolute` The absolute path to the entry on the filesystem. By + default, this is `path.resolve(this.cwd, this.path)`, but it can be + overridden explicitly. +- `strict` Treat warnings as crash-worthy errors. Default false. +- `win32` True if on a windows platform. Causes behavior where paths + replace `\` with `/` and filenames containing the windows-compatible + forms of `<|>?:` characters are converted to actual `<|>?:` characters + in the archive. +- `noPax` Suppress pax extended headers. Note that this means that + long paths and linkpaths will be truncated, and large or negative + numeric values may be interpreted incorrectly. +- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. + + +#### constructor(path, options) + +`path` is the path of the entry as it is written in the archive. + +The following options are supported: + +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary other + time-based operations. +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 1 MB. +- `linkCache` A Map object containing the device and inode value for + any file whose nlink is > 1, to identify hard links. +- `statCache` A Map object that caches calls `lstat`. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. +- `cwd` The current working directory for creating the archive. + Defaults to `process.cwd()`. +- `absolute` The absolute path to the entry on the filesystem. By + default, this is `path.resolve(this.cwd, this.path)`, but it can be + overridden explicitly. +- `strict` Treat warnings as crash-worthy errors. Default false. 
+- `win32` True if on a windows platform. Causes behavior where paths
+  replace `\` with `/`.
+- `onwarn` A function that will get called with `(message, data)` for
+  any warnings encountered.
+- `noMtime` Set to true to omit writing `mtime` values for entries.
+  Note that this prevents using other mtime-based features like
+  `tar.update` or the `keepNewer` option with the resulting tar archive.
+- `umask` Set to restrict the modes on the entries in the archive,
+  somewhat like how umask works on file creation. Defaults to
+  `process.umask()` on unix systems, or `0o22` on Windows.
+
+#### warn(message, data)
+
+If strict, emit an error with the provided message.
+
+Otherwise, emit a `'warn'` event with the provided message and data.
+
+### class tar.WriteEntry.Sync
+
+Synchronous version of tar.WriteEntry.
+
+### class tar.WriteEntry.Tar
+
+A version of tar.WriteEntry that gets its data from a tar.ReadEntry
+instead of from the filesystem.
+
+#### constructor(readEntry, options)
+
+`readEntry` is the entry being read out of another archive.
+
+The following options are supported:
+
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+  `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+  time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+  from absolute paths.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `onwarn` A function that will get called with `(message, data)` for
+  any warnings encountered.
+- `noMtime` Set to true to omit writing `mtime` values for entries.
+  Note that this prevents using other mtime-based features like
+  `tar.update` or the `keepNewer` option with the resulting tar archive.
+
+### class tar.Header
+
+A class for reading and writing header blocks.
+
+It has the following fields:
+
+- `nullBlock` True if decoding a block which is entirely composed of
+  `0x00` null bytes. (Useful because tar files are terminated by
+  at least 2 null blocks.)
+- `cksumValid` True if the checksum in the header is valid, false
+  otherwise.
+- `needPax` True if the values, as encoded, will require a Pax
+  extended header.
+- `path` The path of the entry.
+- `mode` The 4 lowest-order octal digits of the file mode. That is,
+  read/write/execute permissions for world, group, and owner, and the
+  setuid, setgid, and sticky bits.
+- `uid` Numeric user id of the file owner
+- `gid` Numeric group id of the file owner
+- `size` Size of the file in bytes
+- `mtime` Modified time of the file
+- `cksum` The checksum of the header. This is generated by adding all
+  the bytes of the header block, treating the checksum field itself as
+  all ascii space characters (that is, `0x20`).
+- `type` The human-readable name of the type of entry this represents,
+  or the alphanumeric key if unknown.
+- `typeKey` The alphanumeric key for the type of entry this header
+  represents.
+- `linkpath` The target of Link and SymbolicLink entries.
+- `uname` Human-readable user name of the file owner
+- `gname` Human-readable group name of the file owner
+- `devmaj` The major portion of the device number. Always `0` for
+  files, directories, and links.
+- `devmin` The minor portion of the device number. Always `0` for
+  files, directories, and links.
+- `atime` File access time.
+- `ctime` File change time.
+
+#### constructor(data, [offset=0])
+
+`data` is optional. It is either a Buffer that should be interpreted
+as a tar Header starting at the specified offset and continuing for
+512 bytes, or a data object of keys and values to set on the header
+object, and eventually encode as a tar Header.
+
+#### decode(block, offset)
+
+Decode the provided buffer starting at the specified offset.
+
+The buffer must contain at least 512 bytes from the specified offset.
+
+#### set(data)
+
+Set the header fields from the provided data object.
+
+#### encode(buffer, offset)
+
+Encode the header fields into the buffer at the specified offset.
+
+Returns `this.needPax` to indicate whether a Pax Extended Header is
+required to properly encode the specified data.
+
+### class tar.Pax
+
+An object representing a set of key-value pairs in a Pax extended
+header entry.
+
+It has the following fields. Where the same name is used, they have
+the same semantics as the tar.Header field of the same name.
+
+- `global` True if this represents a global extended header, or false
+  if it is for a single entry.
+- `atime`
+- `charset`
+- `comment`
+- `ctime`
+- `gid`
+- `gname`
+- `linkpath`
+- `mtime`
+- `path`
+- `size`
+- `uid`
+- `uname`
+- `dev`
+- `ino`
+- `nlink`
+
+#### constructor(object, global)
+
+Set the fields that are set in the provided object. `global` is a
+boolean that defaults to false.
+
+#### encode()
+
+Return a Buffer containing the header and body for the Pax extended
+header entry, or `null` if there is nothing to encode.
+
+#### encodeBody()
+
+Return a string representing the body of the pax extended header
+entry.
+
+#### encodeField(fieldName)
+
+Return a string representing the key/value encoding for the specified
+fieldName, or `''` if the field is unset.
+
+### tar.Pax.parse(string, extended, global)
+
+Return a new Pax object created by parsing the contents of the string
+provided.
+
+If the `extended` object is set, then also add the fields from that
+object. (This is necessary because multiple metadata entries can
+occur in sequence.)
+
+### tar.types
+
+A translation table for the `type` field in tar headers.
+
+#### tar.types.name.get(code)
+
+Get the human-readable name for a given alphanumeric code.
+
+#### tar.types.code.get(name)
+
+Get the alphanumeric code for a given human-readable name.
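+
+As a small worked example (a sketch for illustration only, not part of
+the upstream documentation), a header block can be round-tripped
+through `tar.Header`, and `tar.types` translates between type names
+and codes:
+
+```js
+const tar = require('tar')
+
+// Encode a header describing a small file into a 512-byte block.
+const h = new tar.Header({
+  path: 'hello.txt',
+  mode: 0o644,
+  size: 11,
+  type: 'File',
+  mtime: new Date()
+})
+const block = Buffer.alloc(512)
+h.encode(block, 0) // returns h.needPax (false for this simple entry)
+
+// Decode the same block back into a Header object.
+const decoded = new tar.Header(block)
+console.log(decoded.path, decoded.size, decoded.cksumValid) // hello.txt 11 true
+
+// Translate between human-readable type names and alphanumeric codes.
+console.log(tar.types.code.get('File')) // '0'
+console.log(tar.types.name.get('0'))    // 'File'
+```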
diff --git a/node_modules/tar/index.js b/node_modules/tar/index.js new file mode 100644 index 00000000..c9ae06e7 --- /dev/null +++ b/node_modules/tar/index.js @@ -0,0 +1,18 @@ +'use strict' + +// high-level commands +exports.c = exports.create = require('./lib/create.js') +exports.r = exports.replace = require('./lib/replace.js') +exports.t = exports.list = require('./lib/list.js') +exports.u = exports.update = require('./lib/update.js') +exports.x = exports.extract = require('./lib/extract.js') + +// classes +exports.Pack = require('./lib/pack.js') +exports.Unpack = require('./lib/unpack.js') +exports.Parse = require('./lib/parse.js') +exports.ReadEntry = require('./lib/read-entry.js') +exports.WriteEntry = require('./lib/write-entry.js') +exports.Header = require('./lib/header.js') +exports.Pax = require('./lib/pax.js') +exports.types = require('./lib/types.js') diff --git a/node_modules/tar/lib/.mkdir.js.swp b/node_modules/tar/lib/.mkdir.js.swp new file mode 100644 index 0000000000000000000000000000000000000000..b4f372abc526c2b24f9f496916c2212d938527f6 GIT binary patch literal 16384 zcmeI2Z;Tve9mgMwB3RlQ(I_v3rzqV$S$0oAimg|I?S-P0CbnRJ(>l95cRRhgneEJM z&x0!_{sSfTtr25*fnv1yr-2vvR*Yg|{1b@rg}%@zm>3haCdQbcpWpLmcJ8jd(pD2S zv&rXn_WApJzR&;PTzB&B+xO|4+mnjV>y--L_t%-^?#GmVMyV(df?h5W>wk)1)Q;0$ z5U-z1J!lK;^uwc_!+EC&vQ8XzI}7vuC~F^n1r?p8{5rZ;pjO}v3XF=`>u%npZn*xP zW~#sK>aF_nH{5lG#C343K&?QnK&?QnK&?QnK&?QnK&`<4qXNZngZdZ^+u$1aY4`oA zRo@r6-+SHnL#yK5&-$xYpjMz(pjMz(pjMz(pjMz(pjMz(pjMz(pjP0&P=TPY)N8S8 zr#Jw3|L^brr?x2dLwF93!$;r{?1F3H5_oB|QZK@@@HiZS8Mqau;62cS%V7&_g3I8K zo0R$~JPMzJA;d5b_rWyW3b(*6*a*M4OsSLbc{mFD;10M6G@RO~)C=%Mcogo355pd4 z!*#F`{`oqk{sJ$-kKmiI3|)9HY=?Ki+d;!s@V85q`W^fheg-GtVb}+|U_0Cd+u$PD z0KdFMsqespunZ5t_3&0u@FJo=2Pa`3-VASoH^TQWR_Yn}B)lK4f-B+gufcA30iJ>( z+yn2125f?puU6^__&Dr<4rsUnHp8EAl+3quZ?VsxEDVZJ9=a1Iy9=OZ|FpbpiXrxx z4&yweI;zpfY-4e@YT1q4LSXVw5G4APE=sfu#khdOFL}tYV4QEm9+7GcVdvPAT0Ko$sKiW|?M%L0Zq|Z6k!M#;B{)q#AZk+IUk|H*4H( zi4hyCAy{G6c4mZ`?Jir25mU0*G~+5RunV>Arc$syQ5N=!G+U~wMVI77P)wKoW|NOY zusxO(&UBZ$rSo#kt=2-ic=ux8cCQ&-Vb<<3t}C5{JSNDl2-A1&Ke*@i4>UAmY<0x+ zod+2LN-D9kwueD6VDVs>6oWPsAPE*iDNlYg517?dYF8JVV}hY-XO!!CSqsi%Vo0a? zt}`?3KQp@AWVxW;mxh~p-ku9>*BK413et2W=mt#Ua?YW!izUj=zQ!>RE3kON@E_GzrDFaf5U0$Vc6F z&e~iwJ2Rc-nVHTw2~noq45=)PgCaT_Hcf>LunE~t%05)&3~fINm=6s}i1~Rffvu>%--_jIpb~i?Tn;I6taftp91pSX0Z{ zM@}Vp~QH+uH`4rcVl%0n3e<$25;q$*38of>Xt zHg>z+u|Ws3K5N;*bW^q|0&LP^5HVNeyvlaQ7ZIGKCD3etvP2gH6PS;NArnKAvY7AC zyMrW2IeYeXloY9+Y1ktbdnVERlD;>CI2*}nKz71nfW5Lo%SIUod0y_{oMnqJ>F3() z=3=0jx3V<{mdexI*&3AFlOJtzPo@RyG^^oSgBe-SD^MUyM{@(oZVrE1s$Sb9SGsID zawhPtI#4-yu5Fo}*><79!Yn*8;>6T!*bpl5n+Z}@qNR#5a#psPGUsewM%Z1W8fimF zpcsWiM30g=9nd^(0kUw1^q&s&TyTFB$5G02HkY?0@xlFj_V3UK84Nlfq@%d6ne^SD z8!vI6AdBh3mX=X4J4$*5nS(f9G8HXm!I1IdD{^MdG7k7xdoCxk^ioS>#MvM(vZz-y zl-&RKb02-=Qf_tJ|M$Af-4D3me;=NJ&%zMyg?nHpTn!h)ues0v7`_cp!U^~Uh%aCW z_rYH1z%}p>?()Bf=iym+8a@XP!!gJpfd%*wybErG&F}~A`9Fsz;W!+DA$$<_!fh}G zSHjEO^?w6DfiHmg0&avGU<>@2d;Rn9UHB?I0*7HQ$o>BcIE5zPg0I0>;LC6vM61t0 z{izkG6{r=c6{r=c6{r=sFa?gu$=lr`%#GLIJ;xUZdp6g6T)Ok|xSC@*=loS3M9$Br zb-8jdE}~>K_VG1_y4D%l4)A^Pqe(bgt=eP6JiL1q5-%SF0MJ`kLxQflr zpUUyy6w04-GF0yTYdkDvVfHe>yNfAtG8-3Z#g?+hN+-7&ncnw&JJs z;tkQ(`W2lgQf_kSB%h&pb&Q?*THYzy_XZO?6+=Vrv@y5pHQB8Pl0tFFNJ>dGZkIR? 
z3AFucswL&W#ban@s=aBM7dKVTBO);JmMJ>T|_ j8y37JbACoEp)ukNUd8d6x5B4r$CLLLx^n-we1-o4Xpoxm literal 0 HcmV?d00001 diff --git a/node_modules/tar/lib/buffer.js b/node_modules/tar/lib/buffer.js new file mode 100644 index 00000000..7876d5b3 --- /dev/null +++ b/node_modules/tar/lib/buffer.js @@ -0,0 +1,11 @@ +'use strict' + +// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from +// or Buffer.alloc, and Buffer in node 10 deprecated the ctor. +// .M, this is fine .\^/M.. +let B = Buffer +/* istanbul ignore next */ +if (!B.alloc) { + B = require('safe-buffer').Buffer +} +module.exports = B diff --git a/node_modules/tar/lib/create.js b/node_modules/tar/lib/create.js new file mode 100644 index 00000000..a37aa52e --- /dev/null +++ b/node_modules/tar/lib/create.js @@ -0,0 +1,105 @@ +'use strict' + +// tar -c +const hlo = require('./high-level-opt.js') + +const Pack = require('./pack.js') +const fs = require('fs') +const fsm = require('fs-minipass') +const t = require('./list.js') +const path = require('path') + +const c = module.exports = (opt_, files, cb) => { + if (typeof files === 'function') + cb = files + + if (Array.isArray(opt_)) + files = opt_, opt_ = {} + + if (!files || !Array.isArray(files) || !files.length) + throw new TypeError('no files or directories specified') + + files = Array.from(files) + + const opt = hlo(opt_) + + if (opt.sync && typeof cb === 'function') + throw new TypeError('callback not supported for sync tar functions') + + if (!opt.file && typeof cb === 'function') + throw new TypeError('callback only supported with file option') + + return opt.file && opt.sync ? createFileSync(opt, files) + : opt.file ? createFile(opt, files, cb) + : opt.sync ? createSync(opt, files) + : create(opt, files) +} + +const createFileSync = (opt, files) => { + const p = new Pack.Sync(opt) + const stream = new fsm.WriteStreamSync(opt.file, { + mode: opt.mode || 0o666 + }) + p.pipe(stream) + addFilesSync(p, files) +} + +const createFile = (opt, files, cb) => { + const p = new Pack(opt) + const stream = new fsm.WriteStream(opt.file, { + mode: opt.mode || 0o666 + }) + p.pipe(stream) + + const promise = new Promise((res, rej) => { + stream.on('error', rej) + stream.on('close', res) + p.on('error', rej) + }) + + addFilesAsync(p, files) + + return cb ? 
promise.then(cb, cb) : promise +} + +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') + t({ + file: path.resolve(p.cwd, file.substr(1)), + sync: true, + noResume: true, + onentry: entry => p.add(entry) + }) + else + p.add(file) + }) + p.end() +} + +const addFilesAsync = (p, files) => { + while (files.length) { + const file = files.shift() + if (file.charAt(0) === '@') + return t({ + file: path.resolve(p.cwd, file.substr(1)), + noResume: true, + onentry: entry => p.add(entry) + }).then(_ => addFilesAsync(p, files)) + else + p.add(file) + } + p.end() +} + +const createSync = (opt, files) => { + const p = new Pack.Sync(opt) + addFilesSync(p, files) + return p +} + +const create = (opt, files) => { + const p = new Pack(opt) + addFilesAsync(p, files) + return p +} diff --git a/node_modules/tar/lib/extract.js b/node_modules/tar/lib/extract.js new file mode 100644 index 00000000..cbb458a0 --- /dev/null +++ b/node_modules/tar/lib/extract.js @@ -0,0 +1,112 @@ +'use strict' + +// tar -x +const hlo = require('./high-level-opt.js') +const Unpack = require('./unpack.js') +const fs = require('fs') +const fsm = require('fs-minipass') +const path = require('path') + +const x = module.exports = (opt_, files, cb) => { + if (typeof opt_ === 'function') + cb = opt_, files = null, opt_ = {} + else if (Array.isArray(opt_)) + files = opt_, opt_ = {} + + if (typeof files === 'function') + cb = files, files = null + + if (!files) + files = [] + else + files = Array.from(files) + + const opt = hlo(opt_) + + if (opt.sync && typeof cb === 'function') + throw new TypeError('callback not supported for sync tar functions') + + if (!opt.file && typeof cb === 'function') + throw new TypeError('callback only supported with file option') + + if (files.length) + filesFilter(opt, files) + + return opt.file && opt.sync ? extractFileSync(opt) + : opt.file ? extractFile(opt, cb) + : opt.sync ? extractSync(opt) + : extract(opt) +} + +// construct a filter that limits the file entries listed +// include child entries if a dir is included +const filesFilter = (opt, files) => { + const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true])) + const filter = opt.filter + + const mapHas = (file, r) => { + const root = r || path.parse(file).root || '.' + const ret = file === root ? false + : map.has(file) ? map.get(file) + : mapHas(path.dirname(file), root) + + map.set(file, ret) + return ret + } + + opt.filter = filter + ? 
(file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, '')) + : file => mapHas(file.replace(/\/+$/, '')) +} + +const extractFileSync = opt => { + const u = new Unpack.Sync(opt) + + const file = opt.file + let threw = true + let fd + const stat = fs.statSync(file) + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + const readSize = opt.maxReadSize || 16*1024*1024 + const stream = new fsm.ReadStreamSync(file, { + readSize: readSize, + size: stat.size + }) + stream.pipe(u) +} + +const extractFile = (opt, cb) => { + const u = new Unpack(opt) + const readSize = opt.maxReadSize || 16*1024*1024 + + const file = opt.file + const p = new Promise((resolve, reject) => { + u.on('error', reject) + u.on('close', resolve) + + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + fs.stat(file, (er, stat) => { + if (er) + reject(er) + else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size + }) + stream.on('error', reject) + stream.pipe(u) + } + }) + }) + return cb ? p.then(cb, cb) : p +} + +const extractSync = opt => { + return new Unpack.Sync(opt) +} + +const extract = opt => { + return new Unpack(opt) +} diff --git a/node_modules/tar/lib/header.js b/node_modules/tar/lib/header.js new file mode 100644 index 00000000..d29c3b99 --- /dev/null +++ b/node_modules/tar/lib/header.js @@ -0,0 +1,289 @@ +'use strict' +// parse a 512-byte header block to a data object, or vice-versa +// encode returns `true` if a pax extended header is needed, because +// the data could not be faithfully encoded in a simple header. +// (Also, check header.needPax to see if it needs a pax header.) + +const Buffer = require('./buffer.js') +const types = require('./types.js') +const pathModule = require('path').posix +const large = require('./large-numbers.js') + +const SLURP = Symbol('slurp') +const TYPE = Symbol('type') + +class Header { + constructor (data, off, ex, gex) { + this.cksumValid = false + this.needPax = false + this.nullBlock = false + + this.block = null + this.path = null + this.mode = null + this.uid = null + this.gid = null + this.size = null + this.mtime = null + this.cksum = null + this[TYPE] = '0' + this.linkpath = null + this.uname = null + this.gname = null + this.devmaj = 0 + this.devmin = 0 + this.atime = null + this.ctime = null + + if (Buffer.isBuffer(data)) + this.decode(data, off || 0, ex, gex) + else if (data) + this.set(data) + } + + decode (buf, off, ex, gex) { + if (!off) + off = 0 + + if (!buf || !(buf.length >= off + 512)) + throw new Error('need 512 bytes for header') + + this.path = decString(buf, off, 100) + this.mode = decNumber(buf, off + 100, 8) + this.uid = decNumber(buf, off + 108, 8) + this.gid = decNumber(buf, off + 116, 8) + this.size = decNumber(buf, off + 124, 12) + this.mtime = decDate(buf, off + 136, 12) + this.cksum = decNumber(buf, off + 148, 12) + + // if we have extended or global extended headers, apply them now + // See https://github.com/npm/node-tar/pull/187 + this[SLURP](ex) + this[SLURP](gex, true) + + // old tar versions marked dirs as a file with a trailing / + this[TYPE] = decString(buf, off + 156, 1) + if (this[TYPE] === '') + this[TYPE] = '0' + if (this[TYPE] === '0' && this.path.substr(-1) === '/') + this[TYPE] = '5' + + // tar implementations sometimes incorrectly put the stat(dir).size + // as the size in the tarball, even though Directory entries are + // not able to have any body at all. 
In the very rare chance that + // it actually DOES have a body, we weren't going to do anything with + // it anyway, and it'll just be a warning about an invalid header. + if (this[TYPE] === '5') + this.size = 0 + + this.linkpath = decString(buf, off + 157, 100) + if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') { + this.uname = decString(buf, off + 265, 32) + this.gname = decString(buf, off + 297, 32) + this.devmaj = decNumber(buf, off + 329, 8) + this.devmin = decNumber(buf, off + 337, 8) + if (buf[off + 475] !== 0) { + // definitely a prefix, definitely >130 chars. + const prefix = decString(buf, off + 345, 155) + this.path = prefix + '/' + this.path + } else { + const prefix = decString(buf, off + 345, 130) + if (prefix) + this.path = prefix + '/' + this.path + this.atime = decDate(buf, off + 476, 12) + this.ctime = decDate(buf, off + 488, 12) + } + } + + let sum = 8 * 0x20 + for (let i = off; i < off + 148; i++) { + sum += buf[i] + } + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i] + } + this.cksumValid = sum === this.cksum + if (this.cksum === null && sum === 8 * 0x20) + this.nullBlock = true + } + + [SLURP] (ex, global) { + for (let k in ex) { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. + if (ex[k] !== null && ex[k] !== undefined && + !(global && k === 'path')) + this[k] = ex[k] + } + } + + encode (buf, off) { + if (!buf) { + buf = this.block = Buffer.alloc(512) + off = 0 + } + + if (!off) + off = 0 + + if (!(buf.length >= off + 512)) + throw new Error('need 512 bytes for header') + + const prefixSize = this.ctime || this.atime ? 130 : 155 + const split = splitPrefix(this.path || '', prefixSize) + const path = split[0] + const prefix = split[1] + this.needPax = split[2] + + this.needPax = encString(buf, off, 100, path) || this.needPax + this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax + this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax + this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax + this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax + this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax + buf[off + 156] = this[TYPE].charCodeAt(0) + this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax + buf.write('ustar\u000000', off + 257, 8) + this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax + this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax + this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax + this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax + this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax + if (buf[off + 475] !== 0) + this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax + else { + this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax + this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax + this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax + } + + let sum = 8 * 0x20 + for (let i = off; i < off + 148; i++) { + sum += buf[i] + } + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i] + } + this.cksum = sum + encNumber(buf, off + 148, 8, this.cksum) + this.cksumValid = true + + return this.needPax + } + + set (data) { + for (let i in data) { + if (data[i] !== null && data[i] !== undefined) + this[i] = data[i] + } + } + + get type () { + return 
types.name.get(this[TYPE]) || this[TYPE] + } + + get typeKey () { + return this[TYPE] + } + + set type (type) { + if (types.code.has(type)) + this[TYPE] = types.code.get(type) + else + this[TYPE] = type + } +} + +const splitPrefix = (p, prefixSize) => { + const pathSize = 100 + let pp = p + let prefix = '' + let ret + const root = pathModule.parse(p).root || '.' + + if (Buffer.byteLength(pp) < pathSize) + ret = [pp, prefix, false] + else { + // first set prefix to the dir, and path to the base + prefix = pathModule.dirname(pp) + pp = pathModule.basename(pp) + + do { + // both fit! + if (Buffer.byteLength(pp) <= pathSize && + Buffer.byteLength(prefix) <= prefixSize) + ret = [pp, prefix, false] + + // prefix fits in prefix, but path doesn't fit in path + else if (Buffer.byteLength(pp) > pathSize && + Buffer.byteLength(prefix) <= prefixSize) + ret = [pp.substr(0, pathSize - 1), prefix, true] + + else { + // make path take a bit from prefix + pp = pathModule.join(pathModule.basename(prefix), pp) + prefix = pathModule.dirname(prefix) + } + } while (prefix !== root && !ret) + + // at this point, found no resolution, just truncate + if (!ret) + ret = [p.substr(0, pathSize - 1), '', true] + } + return ret +} + +const decString = (buf, off, size) => + buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '') + +const decDate = (buf, off, size) => + numToDate(decNumber(buf, off, size)) + +const numToDate = num => num === null ? null : new Date(num * 1000) + +const decNumber = (buf, off, size) => + buf[off] & 0x80 ? large.parse(buf.slice(off, off + size)) + : decSmallNumber(buf, off, size) + +const nanNull = value => isNaN(value) ? null : value + +const decSmallNumber = (buf, off, size) => + nanNull(parseInt( + buf.slice(off, off + size) + .toString('utf8').replace(/\0.*$/, '').trim(), 8)) + +// the maximum encodable as a null-terminated octal, by field size +const MAXNUM = { + 12: 0o77777777777, + 8 : 0o7777777 +} + +const encNumber = (buf, off, size, number) => + number === null ? false : + number > MAXNUM[size] || number < 0 + ? (large.encode(number, buf.slice(off, off + size)), true) + : (encSmallNumber(buf, off, size, number), false) + +const encSmallNumber = (buf, off, size, number) => + buf.write(octalString(number, size), off, size, 'ascii') + +const octalString = (number, size) => + padOctal(Math.floor(number).toString(8), size) + +const padOctal = (string, size) => + (string.length === size - 1 ? string + : new Array(size - string.length - 1).join('0') + string + ' ') + '\0' + +const encDate = (buf, off, size, date) => + date === null ? false : + encNumber(buf, off, size, date.getTime() / 1000) + +// enough to fill the longest string we've got +const NULLS = new Array(156).join('\0') +// pad with nulls, return true if it's longer or non-ascii +const encString = (buf, off, size, string) => + string === null ? 
false : + (buf.write(string + NULLS, off, size, 'utf8'), + string.length !== Buffer.byteLength(string) || string.length > size) + +module.exports = Header diff --git a/node_modules/tar/lib/high-level-opt.js b/node_modules/tar/lib/high-level-opt.js new file mode 100644 index 00000000..7333db91 --- /dev/null +++ b/node_modules/tar/lib/high-level-opt.js @@ -0,0 +1,29 @@ +'use strict' + +// turn tar(1) style args like `C` into the more verbose things like `cwd` + +const argmap = new Map([ + ['C', 'cwd'], + ['f', 'file'], + ['z', 'gzip'], + ['P', 'preservePaths'], + ['U', 'unlink'], + ['strip-components', 'strip'], + ['stripComponents', 'strip'], + ['keep-newer', 'newer'], + ['keepNewer', 'newer'], + ['keep-newer-files', 'newer'], + ['keepNewerFiles', 'newer'], + ['k', 'keep'], + ['keep-existing', 'keep'], + ['keepExisting', 'keep'], + ['m', 'noMtime'], + ['no-mtime', 'noMtime'], + ['p', 'preserveOwner'], + ['L', 'follow'], + ['h', 'follow'] +]) + +const parse = module.exports = opt => opt ? Object.keys(opt).map(k => [ + argmap.has(k) ? argmap.get(k) : k, opt[k] +]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {} diff --git a/node_modules/tar/lib/large-numbers.js b/node_modules/tar/lib/large-numbers.js new file mode 100644 index 00000000..3e5c9925 --- /dev/null +++ b/node_modules/tar/lib/large-numbers.js @@ -0,0 +1,97 @@ +'use strict' +// Tar can encode large and negative numbers using a leading byte of +// 0xff for negative, and 0x80 for positive. + +const encode = exports.encode = (num, buf) => { + if (!Number.isSafeInteger(num)) + // The number is so large that javascript cannot represent it with integer + // precision. + throw TypeError('cannot encode number outside of javascript safe integer range') + else if (num < 0) + encodeNegative(num, buf) + else + encodePositive(num, buf) + return buf +} + +const encodePositive = (num, buf) => { + buf[0] = 0x80 + + for (var i = buf.length; i > 1; i--) { + buf[i-1] = num & 0xff + num = Math.floor(num / 0x100) + } +} + +const encodeNegative = (num, buf) => { + buf[0] = 0xff + var flipped = false + num = num * -1 + for (var i = buf.length; i > 1; i--) { + var byte = num & 0xff + num = Math.floor(num / 0x100) + if (flipped) + buf[i-1] = onesComp(byte) + else if (byte === 0) + buf[i-1] = 0 + else { + flipped = true + buf[i-1] = twosComp(byte) + } + } +} + +const parse = exports.parse = (buf) => { + var post = buf[buf.length - 1] + var pre = buf[0] + var value; + if (pre === 0x80) + value = pos(buf.slice(1, buf.length)) + else if (pre === 0xff) + value = twos(buf) + else + throw TypeError('invalid base256 encoding') + + if (!Number.isSafeInteger(value)) + // The number is so large that javascript cannot represent it with integer + // precision. 
+ throw TypeError('parsed number outside of javascript safe integer range') + + return value +} + +const twos = (buf) => { + var len = buf.length + var sum = 0 + var flipped = false + for (var i = len - 1; i > -1; i--) { + var byte = buf[i] + var f + if (flipped) + f = onesComp(byte) + else if (byte === 0) + f = byte + else { + flipped = true + f = twosComp(byte) + } + if (f !== 0) + sum -= f * Math.pow(256, len - i - 1) + } + return sum +} + +const pos = (buf) => { + var len = buf.length + var sum = 0 + for (var i = len - 1; i > -1; i--) { + var byte = buf[i] + if (byte !== 0) + sum += byte * Math.pow(256, len - i - 1) + } + return sum +} + +const onesComp = byte => (0xff ^ byte) & 0xff + +const twosComp = byte => ((0xff ^ byte) + 1) & 0xff diff --git a/node_modules/tar/lib/list.js b/node_modules/tar/lib/list.js new file mode 100644 index 00000000..250ebe00 --- /dev/null +++ b/node_modules/tar/lib/list.js @@ -0,0 +1,130 @@ +'use strict' + +const Buffer = require('./buffer.js') + +// XXX: This shares a lot in common with extract.js +// maybe some DRY opportunity here? + +// tar -t +const hlo = require('./high-level-opt.js') +const Parser = require('./parse.js') +const fs = require('fs') +const fsm = require('fs-minipass') +const path = require('path') + +const t = module.exports = (opt_, files, cb) => { + if (typeof opt_ === 'function') + cb = opt_, files = null, opt_ = {} + else if (Array.isArray(opt_)) + files = opt_, opt_ = {} + + if (typeof files === 'function') + cb = files, files = null + + if (!files) + files = [] + else + files = Array.from(files) + + const opt = hlo(opt_) + + if (opt.sync && typeof cb === 'function') + throw new TypeError('callback not supported for sync tar functions') + + if (!opt.file && typeof cb === 'function') + throw new TypeError('callback only supported with file option') + + if (files.length) + filesFilter(opt, files) + + if (!opt.noResume) + onentryFunction(opt) + + return opt.file && opt.sync ? listFileSync(opt) + : opt.file ? listFile(opt, cb) + : list(opt) +} + +const onentryFunction = opt => { + const onentry = opt.onentry + opt.onentry = onentry ? e => { + onentry(e) + e.resume() + } : e => e.resume() +} + +// construct a filter that limits the file entries listed +// include child entries if a dir is included +const filesFilter = (opt, files) => { + const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true])) + const filter = opt.filter + + const mapHas = (file, r) => { + const root = r || path.parse(file).root || '.' + const ret = file === root ? false + : map.has(file) ? map.get(file) + : mapHas(path.dirname(file), root) + + map.set(file, ret) + return ret + } + + opt.filter = filter + ? 
(file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, '')) + : file => mapHas(file.replace(/\/+$/, '')) +} + +const listFileSync = opt => { + const p = list(opt) + const file = opt.file + let threw = true + let fd + try { + const stat = fs.statSync(file) + const readSize = opt.maxReadSize || 16*1024*1024 + if (stat.size < readSize) { + p.end(fs.readFileSync(file)) + } else { + let pos = 0 + const buf = Buffer.allocUnsafe(readSize) + fd = fs.openSync(file, 'r') + while (pos < stat.size) { + let bytesRead = fs.readSync(fd, buf, 0, readSize, pos) + pos += bytesRead + p.write(buf.slice(0, bytesRead)) + } + p.end() + } + threw = false + } finally { + if (threw && fd) + try { fs.closeSync(fd) } catch (er) {} + } +} + +const listFile = (opt, cb) => { + const parse = new Parser(opt) + const readSize = opt.maxReadSize || 16*1024*1024 + + const file = opt.file + const p = new Promise((resolve, reject) => { + parse.on('error', reject) + parse.on('end', resolve) + + fs.stat(file, (er, stat) => { + if (er) + reject(er) + else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size + }) + stream.on('error', reject) + stream.pipe(parse) + } + }) + }) + return cb ? p.then(cb, cb) : p +} + +const list = opt => new Parser(opt) diff --git a/node_modules/tar/lib/mkdir.js b/node_modules/tar/lib/mkdir.js new file mode 100644 index 00000000..c6a154c2 --- /dev/null +++ b/node_modules/tar/lib/mkdir.js @@ -0,0 +1,206 @@ +'use strict' +// wrapper around mkdirp for tar's needs. + +// TODO: This should probably be a class, not functionally +// passing around state in a gazillion args. + +const mkdirp = require('mkdirp') +const fs = require('fs') +const path = require('path') +const chownr = require('chownr') + +class SymlinkError extends Error { + constructor (symlink, path) { + super('Cannot extract through symbolic link') + this.path = path + this.symlink = symlink + } + + get name () { + return 'SylinkError' + } +} + +class CwdError extends Error { + constructor (path, code) { + super(code + ': Cannot cd into \'' + path + '\'') + this.path = path + this.code = code + } + + get name () { + return 'CwdError' + } +} + +const mkdir = module.exports = (dir, opt, cb) => { + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + const umask = opt.umask + const mode = opt.mode | 0o0700 + const needChmod = (mode & umask) !== 0 + + const uid = opt.uid + const gid = opt.gid + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + ( uid !== opt.processUid || gid !== opt.processGid ) + + const preserve = opt.preserve + const unlink = opt.unlink + const cache = opt.cache + const cwd = opt.cwd + + const done = (er, created) => { + if (er) + cb(er) + else { + cache.set(dir, true) + if (created && doChown) + chownr(created, uid, gid, er => done(er)) + else if (needChmod) + fs.chmod(dir, mode, cb) + else + cb() + } + } + + if (cache && cache.get(dir) === true) + return done() + + if (dir === cwd) + return fs.stat(dir, (er, st) => { + if (er || !st.isDirectory()) + er = new CwdError(dir, er && er.code || 'ENOTDIR') + done(er) + }) + + if (preserve) + return mkdirp(dir, mode, done) + + const sub = path.relative(cwd, dir) + const parts = sub.split(/\/|\\/) + mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done) +} + +const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { + if (!parts.length) + return cb(null, created) + const p = parts.shift() + const part = base + '/' + p + if (cache.get(part)) + return mkdir_(part, 
parts, mode, cache, unlink, cwd, created, cb) + fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) +} + +const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { + if (er) { + if (er.path && path.dirname(er.path) === cwd && + (er.code === 'ENOTDIR' || er.code === 'ENOENT')) + return cb(new CwdError(cwd, er.code)) + + fs.lstat(part, (statEr, st) => { + if (statEr) + cb(statEr) + else if (st.isDirectory()) + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) + else if (unlink) + fs.unlink(part, er => { + if (er) + return cb(er) + fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) + }) + else if (st.isSymbolicLink()) + return cb(new SymlinkError(part, part + '/' + parts.join('/'))) + else + cb(er) + }) + } else { + created = created || part + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) + } +} + +const mkdirSync = module.exports.sync = (dir, opt) => { + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + const umask = opt.umask + const mode = opt.mode | 0o0700 + const needChmod = (mode & umask) !== 0 + + const uid = opt.uid + const gid = opt.gid + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + ( uid !== opt.processUid || gid !== opt.processGid ) + + const preserve = opt.preserve + const unlink = opt.unlink + const cache = opt.cache + const cwd = opt.cwd + + const done = (created) => { + cache.set(dir, true) + if (created && doChown) + chownr.sync(created, uid, gid) + if (needChmod) + fs.chmodSync(dir, mode) + } + + if (cache && cache.get(dir) === true) + return done() + + if (dir === cwd) { + let ok = false + let code = 'ENOTDIR' + try { + ok = fs.statSync(dir).isDirectory() + } catch (er) { + code = er.code + } finally { + if (!ok) + throw new CwdError(dir, code) + } + done() + return + } + + if (preserve) + return done(mkdirp.sync(dir, mode)) + + const sub = path.relative(cwd, dir) + const parts = sub.split(/\/|\\/) + let created = null + for (let p = parts.shift(), part = cwd; + p && (part += '/' + p); + p = parts.shift()) { + + if (cache.get(part)) + continue + + try { + fs.mkdirSync(part, mode) + created = created || part + cache.set(part, true) + } catch (er) { + if (er.path && path.dirname(er.path) === cwd && + (er.code === 'ENOTDIR' || er.code === 'ENOENT')) + return new CwdError(cwd, er.code) + + const st = fs.lstatSync(part) + if (st.isDirectory()) { + cache.set(part, true) + continue + } else if (unlink) { + fs.unlinkSync(part) + fs.mkdirSync(part, mode) + created = created || part + cache.set(part, true) + continue + } else if (st.isSymbolicLink()) + return new SymlinkError(part, part + '/' + parts.join('/')) + } + } + + return done(created) +} diff --git a/node_modules/tar/lib/mode-fix.js b/node_modules/tar/lib/mode-fix.js new file mode 100644 index 00000000..3363a3b1 --- /dev/null +++ b/node_modules/tar/lib/mode-fix.js @@ -0,0 +1,14 @@ +'use strict' +module.exports = (mode, isDir) => { + mode &= 0o7777 + // if dirs are readable, then they should be listable + if (isDir) { + if (mode & 0o400) + mode |= 0o100 + if (mode & 0o40) + mode |= 0o10 + if (mode & 0o4) + mode |= 0o1 + } + return mode +} diff --git a/node_modules/tar/lib/pack.js b/node_modules/tar/lib/pack.js new file mode 100644 index 00000000..857cea91 --- /dev/null +++ b/node_modules/tar/lib/pack.js @@ -0,0 +1,404 @@ +'use strict' + +const Buffer = require('./buffer.js') + +// A readable tar stream creator +// Technically, this is a transform stream 
that you write paths into, +// and tar format comes out of. +// The `add()` method is like `write()` but returns this, +// and end() return `this` as well, so you can +// do `new Pack(opt).add('files').add('dir').end().pipe(output) +// You could also do something like: +// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) + +class PackJob { + constructor (path, absolute) { + this.path = path || './' + this.absolute = absolute + this.entry = null + this.stat = null + this.readdir = null + this.pending = false + this.ignore = false + this.piped = false + } +} + +const MiniPass = require('minipass') +const zlib = require('minizlib') +const ReadEntry = require('./read-entry.js') +const WriteEntry = require('./write-entry.js') +const WriteEntrySync = WriteEntry.Sync +const WriteEntryTar = WriteEntry.Tar +const Yallist = require('yallist') +const EOF = Buffer.alloc(1024) +const ONSTAT = Symbol('onStat') +const ENDED = Symbol('ended') +const QUEUE = Symbol('queue') +const CURRENT = Symbol('current') +const PROCESS = Symbol('process') +const PROCESSING = Symbol('processing') +const PROCESSJOB = Symbol('processJob') +const JOBS = Symbol('jobs') +const JOBDONE = Symbol('jobDone') +const ADDFSENTRY = Symbol('addFSEntry') +const ADDTARENTRY = Symbol('addTarEntry') +const STAT = Symbol('stat') +const READDIR = Symbol('readdir') +const ONREADDIR = Symbol('onreaddir') +const PIPE = Symbol('pipe') +const ENTRY = Symbol('entry') +const ENTRYOPT = Symbol('entryOpt') +const WRITEENTRYCLASS = Symbol('writeEntryClass') +const WRITE = Symbol('write') +const ONDRAIN = Symbol('ondrain') + +const fs = require('fs') +const path = require('path') +const warner = require('./warn-mixin.js') + +const Pack = warner(class Pack extends MiniPass { + constructor (opt) { + super(opt) + opt = opt || Object.create(null) + this.opt = opt + this.cwd = opt.cwd || process.cwd() + this.maxReadSize = opt.maxReadSize + this.preservePaths = !!opt.preservePaths + this.strict = !!opt.strict + this.noPax = !!opt.noPax + this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '') + this.linkCache = opt.linkCache || new Map() + this.statCache = opt.statCache || new Map() + this.readdirCache = opt.readdirCache || new Map() + + this[WRITEENTRYCLASS] = WriteEntry + if (typeof opt.onwarn === 'function') + this.on('warn', opt.onwarn) + + this.zip = null + if (opt.gzip) { + if (typeof opt.gzip !== 'object') + opt.gzip = {} + this.zip = new zlib.Gzip(opt.gzip) + this.zip.on('data', chunk => super.write(chunk)) + this.zip.on('end', _ => super.end()) + this.zip.on('drain', _ => this[ONDRAIN]()) + this.on('resume', _ => this.zip.resume()) + } else + this.on('drain', this[ONDRAIN]) + + this.portable = !!opt.portable + this.noDirRecurse = !!opt.noDirRecurse + this.follow = !!opt.follow + this.noMtime = !!opt.noMtime + this.mtime = opt.mtime || null + + this.filter = typeof opt.filter === 'function' ? 
opt.filter : _ => true + + this[QUEUE] = new Yallist + this[JOBS] = 0 + this.jobs = +opt.jobs || 4 + this[PROCESSING] = false + this[ENDED] = false + } + + [WRITE] (chunk) { + return super.write(chunk) + } + + add (path) { + this.write(path) + return this + } + + end (path) { + if (path) + this.write(path) + this[ENDED] = true + this[PROCESS]() + return this + } + + write (path) { + if (this[ENDED]) + throw new Error('write after end') + + if (path instanceof ReadEntry) + this[ADDTARENTRY](path) + else + this[ADDFSENTRY](path) + return this.flowing + } + + [ADDTARENTRY] (p) { + const absolute = path.resolve(this.cwd, p.path) + if (this.prefix) + p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '') + + // in this case, we don't have to wait for the stat + if (!this.filter(p.path, p)) + p.resume() + else { + const job = new PackJob(p.path, absolute, false) + job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)) + job.entry.on('end', _ => this[JOBDONE](job)) + this[JOBS] += 1 + this[QUEUE].push(job) + } + + this[PROCESS]() + } + + [ADDFSENTRY] (p) { + const absolute = path.resolve(this.cwd, p) + if (this.prefix) + p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '') + + this[QUEUE].push(new PackJob(p, absolute)) + this[PROCESS]() + } + + [STAT] (job) { + job.pending = true + this[JOBS] += 1 + const stat = this.follow ? 'stat' : 'lstat' + fs[stat](job.absolute, (er, stat) => { + job.pending = false + this[JOBS] -= 1 + if (er) + this.emit('error', er) + else + this[ONSTAT](job, stat) + }) + } + + [ONSTAT] (job, stat) { + this.statCache.set(job.absolute, stat) + job.stat = stat + + // now we have the stat, we can filter it. + if (!this.filter(job.path, stat)) + job.ignore = true + + this[PROCESS]() + } + + [READDIR] (job) { + job.pending = true + this[JOBS] += 1 + fs.readdir(job.absolute, (er, entries) => { + job.pending = false + this[JOBS] -= 1 + if (er) + return this.emit('error', er) + this[ONREADDIR](job, entries) + }) + } + + [ONREADDIR] (job, entries) { + this.readdirCache.set(job.absolute, entries) + job.readdir = entries + this[PROCESS]() + } + + [PROCESS] () { + if (this[PROCESSING]) + return + + this[PROCESSING] = true + for (let w = this[QUEUE].head; + w !== null && this[JOBS] < this.jobs; + w = w.next) { + this[PROCESSJOB](w.value) + if (w.value.ignore) { + const p = w.next + this[QUEUE].removeNode(w) + w.next = p + } + } + + this[PROCESSING] = false + + if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { + if (this.zip) + this.zip.end(EOF) + else { + super.write(EOF) + super.end() + } + } + } + + get [CURRENT] () { + return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value + } + + [JOBDONE] (job) { + this[QUEUE].shift() + this[JOBS] -= 1 + this[PROCESS]() + } + + [PROCESSJOB] (job) { + if (job.pending) + return + + if (job.entry) { + if (job === this[CURRENT] && !job.piped) + this[PIPE](job) + return + } + + if (!job.stat) { + if (this.statCache.has(job.absolute)) + this[ONSTAT](job, this.statCache.get(job.absolute)) + else + this[STAT](job) + } + if (!job.stat) + return + + // filtered out! 
+ if (job.ignore) + return + + if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) { + if (this.readdirCache.has(job.absolute)) + this[ONREADDIR](job, this.readdirCache.get(job.absolute)) + else + this[READDIR](job) + if (!job.readdir) + return + } + + // we know it doesn't have an entry, because that got checked above + job.entry = this[ENTRY](job) + if (!job.entry) { + job.ignore = true + return + } + + if (job === this[CURRENT] && !job.piped) + this[PIPE](job) + } + + [ENTRYOPT] (job) { + return { + onwarn: (msg, data) => { + this.warn(msg, data) + }, + noPax: this.noPax, + cwd: this.cwd, + absolute: job.absolute, + preservePaths: this.preservePaths, + maxReadSize: this.maxReadSize, + strict: this.strict, + portable: this.portable, + linkCache: this.linkCache, + statCache: this.statCache, + noMtime: this.noMtime, + mtime: this.mtime + } + } + + [ENTRY] (job) { + this[JOBS] += 1 + try { + return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)) + .on('end', () => this[JOBDONE](job)) + .on('error', er => this.emit('error', er)) + } catch (er) { + this.emit('error', er) + } + } + + [ONDRAIN] () { + if (this[CURRENT] && this[CURRENT].entry) + this[CURRENT].entry.resume() + } + + // like .pipe() but using super, because our write() is special + [PIPE] (job) { + job.piped = true + + if (job.readdir) + job.readdir.forEach(entry => { + const p = this.prefix ? + job.path.slice(this.prefix.length + 1) || './' + : job.path + + const base = p === './' ? '' : p.replace(/\/*$/, '/') + this[ADDFSENTRY](base + entry) + }) + + const source = job.entry + const zip = this.zip + + if (zip) + source.on('data', chunk => { + if (!zip.write(chunk)) + source.pause() + }) + else + source.on('data', chunk => { + if (!super.write(chunk)) + source.pause() + }) + } + + pause () { + if (this.zip) + this.zip.pause() + return super.pause() + } +}) + +class PackSync extends Pack { + constructor (opt) { + super(opt) + this[WRITEENTRYCLASS] = WriteEntrySync + } + + // pause/resume are no-ops in sync streams. + pause () {} + resume () {} + + [STAT] (job) { + const stat = this.follow ? 'statSync' : 'lstatSync' + this[ONSTAT](job, fs[stat](job.absolute)) + } + + [READDIR] (job, stat) { + this[ONREADDIR](job, fs.readdirSync(job.absolute)) + } + + // gotta get it all in this tick + [PIPE] (job) { + const source = job.entry + const zip = this.zip + + if (job.readdir) + job.readdir.forEach(entry => { + const p = this.prefix ? + job.path.slice(this.prefix.length + 1) || './' + : job.path + + const base = p === './' ? '' : p.replace(/\/*$/, '/') + this[ADDFSENTRY](base + entry) + }) + + if (zip) + source.on('data', chunk => { + zip.write(chunk) + }) + else + source.on('data', chunk => { + super[WRITE](chunk) + }) + } +} + +Pack.Sync = PackSync + +module.exports = Pack diff --git a/node_modules/tar/lib/parse.js b/node_modules/tar/lib/parse.js new file mode 100644 index 00000000..34e3cd70 --- /dev/null +++ b/node_modules/tar/lib/parse.js @@ -0,0 +1,423 @@ +'use strict' + +// this[BUFFER] is the remainder of a chunk if we're waiting for +// the full 512 bytes of a header to come in. We will Buffer.concat() +// it to the next write(), which is a mem copy, but a small one. +// +// this[QUEUE] is a Yallist of entries that haven't been emitted +// yet this can only get filled up if the user keeps write()ing after +// a write() returns false, or does a write() with more than one entry +// +// We don't buffer chunks, we always parse them and either create an +// entry, or push it into the active entry. 
The ReadEntry class knows +// to throw data away if .ignore=true +// +// Shift entry off the buffer when it emits 'end', and emit 'entry' for +// the next one in the list. +// +// At any time, we're pushing body chunks into the entry at WRITEENTRY, +// and waiting for 'end' on the entry at READENTRY +// +// ignored entries get .resume() called on them straight away + +const warner = require('./warn-mixin.js') +const path = require('path') +const Header = require('./header.js') +const EE = require('events') +const Yallist = require('yallist') +const maxMetaEntrySize = 1024 * 1024 +const Entry = require('./read-entry.js') +const Pax = require('./pax.js') +const zlib = require('minizlib') +const Buffer = require('./buffer.js') + +const gzipHeader = Buffer.from([0x1f, 0x8b]) +const STATE = Symbol('state') +const WRITEENTRY = Symbol('writeEntry') +const READENTRY = Symbol('readEntry') +const NEXTENTRY = Symbol('nextEntry') +const PROCESSENTRY = Symbol('processEntry') +const EX = Symbol('extendedHeader') +const GEX = Symbol('globalExtendedHeader') +const META = Symbol('meta') +const EMITMETA = Symbol('emitMeta') +const BUFFER = Symbol('buffer') +const QUEUE = Symbol('queue') +const ENDED = Symbol('ended') +const EMITTEDEND = Symbol('emittedEnd') +const EMIT = Symbol('emit') +const UNZIP = Symbol('unzip') +const CONSUMECHUNK = Symbol('consumeChunk') +const CONSUMECHUNKSUB = Symbol('consumeChunkSub') +const CONSUMEBODY = Symbol('consumeBody') +const CONSUMEMETA = Symbol('consumeMeta') +const CONSUMEHEADER = Symbol('consumeHeader') +const CONSUMING = Symbol('consuming') +const BUFFERCONCAT = Symbol('bufferConcat') +const MAYBEEND = Symbol('maybeEnd') +const WRITING = Symbol('writing') +const ABORTED = Symbol('aborted') +const DONE = Symbol('onDone') + +const noop = _ => true + +module.exports = warner(class Parser extends EE { + constructor (opt) { + opt = opt || {} + super(opt) + + if (opt.ondone) + this.on(DONE, opt.ondone) + else + this.on(DONE, _ => { + this.emit('prefinish') + this.emit('finish') + this.emit('end') + this.emit('close') + }) + + this.strict = !!opt.strict + this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize + this.filter = typeof opt.filter === 'function' ? 
opt.filter : noop + + // have to set this so that streams are ok piping into it + this.writable = true + this.readable = false + + this[QUEUE] = new Yallist() + this[BUFFER] = null + this[READENTRY] = null + this[WRITEENTRY] = null + this[STATE] = 'begin' + this[META] = '' + this[EX] = null + this[GEX] = null + this[ENDED] = false + this[UNZIP] = null + this[ABORTED] = false + if (typeof opt.onwarn === 'function') + this.on('warn', opt.onwarn) + if (typeof opt.onentry === 'function') + this.on('entry', opt.onentry) + } + + [CONSUMEHEADER] (chunk, position) { + const header = new Header(chunk, position, this[EX], this[GEX]) + + if (header.nullBlock) + this[EMIT]('nullBlock') + else if (!header.cksumValid) + this.warn('invalid entry', header) + else if (!header.path) + this.warn('invalid: path is required', header) + else { + const type = header.type + if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) + this.warn('invalid: linkpath required', header) + else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath) + this.warn('invalid: linkpath forbidden', header) + else { + const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX]) + + if (entry.meta) { + if (entry.size > this.maxMetaEntrySize) { + entry.ignore = true + this[EMIT]('ignoredEntry', entry) + this[STATE] = 'ignore' + } else if (entry.size > 0) { + this[META] = '' + entry.on('data', c => this[META] += c) + this[STATE] = 'meta' + } + } else { + + this[EX] = null + entry.ignore = entry.ignore || !this.filter(entry.path, entry) + if (entry.ignore) { + this[EMIT]('ignoredEntry', entry) + this[STATE] = entry.remain ? 'ignore' : 'begin' + } else { + if (entry.remain) + this[STATE] = 'body' + else { + this[STATE] = 'begin' + entry.end() + } + + if (!this[READENTRY]) { + this[QUEUE].push(entry) + this[NEXTENTRY]() + } else + this[QUEUE].push(entry) + } + } + } + } + } + + [PROCESSENTRY] (entry) { + let go = true + + if (!entry) { + this[READENTRY] = null + go = false + } else if (Array.isArray(entry)) + this.emit.apply(this, entry) + else { + this[READENTRY] = entry + this.emit('entry', entry) + if (!entry.emittedEnd) { + entry.on('end', _ => this[NEXTENTRY]()) + go = false + } + } + + return go + } + + [NEXTENTRY] () { + do {} while (this[PROCESSENTRY](this[QUEUE].shift())) + + if (!this[QUEUE].length) { + // At this point, there's nothing in the queue, but we may have an + // entry which is being consumed (readEntry). + // If we don't, then we definitely can handle more data. + // If we do, and either it's flowing, or it has never had any data + // written to it, then it needs more. + // The only other possibility is that it has returned false from a + // write() call, so we wait for the next drain to continue. + const re = this[READENTRY] + const drainNow = !re || re.flowing || re.size === re.remain + if (drainNow) { + if (!this[WRITING]) + this.emit('drain') + } else + re.once('drain', _ => this.emit('drain')) + } + } + + [CONSUMEBODY] (chunk, position) { + // write up to but no more than writeEntry.blockRemain + const entry = this[WRITEENTRY] + const br = entry.blockRemain + const c = (br >= chunk.length && position === 0) ? 
chunk + : chunk.slice(position, position + br) + + entry.write(c) + + if (!entry.blockRemain) { + this[STATE] = 'begin' + this[WRITEENTRY] = null + entry.end() + } + + return c.length + } + + [CONSUMEMETA] (chunk, position) { + const entry = this[WRITEENTRY] + const ret = this[CONSUMEBODY](chunk, position) + + // if we finished, then the entry is reset + if (!this[WRITEENTRY]) + this[EMITMETA](entry) + + return ret + } + + [EMIT] (ev, data, extra) { + if (!this[QUEUE].length && !this[READENTRY]) + this.emit(ev, data, extra) + else + this[QUEUE].push([ev, data, extra]) + } + + [EMITMETA] (entry) { + this[EMIT]('meta', this[META]) + switch (entry.type) { + case 'ExtendedHeader': + case 'OldExtendedHeader': + this[EX] = Pax.parse(this[META], this[EX], false) + break + + case 'GlobalExtendedHeader': + this[GEX] = Pax.parse(this[META], this[GEX], true) + break + + case 'NextFileHasLongPath': + case 'OldGnuLongPath': + this[EX] = this[EX] || Object.create(null) + this[EX].path = this[META].replace(/\0.*/, '') + break + + case 'NextFileHasLongLinkpath': + this[EX] = this[EX] || Object.create(null) + this[EX].linkpath = this[META].replace(/\0.*/, '') + break + + /* istanbul ignore next */ + default: throw new Error('unknown meta: ' + entry.type) + } + } + + abort (msg, error) { + this[ABORTED] = true + this.warn(msg, error) + this.emit('abort', error) + this.emit('error', error) + } + + write (chunk) { + if (this[ABORTED]) + return + + // first write, might be gzipped + if (this[UNZIP] === null && chunk) { + if (this[BUFFER]) { + chunk = Buffer.concat([this[BUFFER], chunk]) + this[BUFFER] = null + } + if (chunk.length < gzipHeader.length) { + this[BUFFER] = chunk + return true + } + for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) { + if (chunk[i] !== gzipHeader[i]) + this[UNZIP] = false + } + if (this[UNZIP] === null) { + const ended = this[ENDED] + this[ENDED] = false + this[UNZIP] = new zlib.Unzip() + this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk)) + this[UNZIP].on('error', er => + this.abort(er.message, er)) + this[UNZIP].on('end', _ => { + this[ENDED] = true + this[CONSUMECHUNK]() + }) + this[WRITING] = true + const ret = this[UNZIP][ended ? 'end' : 'write' ](chunk) + this[WRITING] = false + return ret + } + } + + this[WRITING] = true + if (this[UNZIP]) + this[UNZIP].write(chunk) + else + this[CONSUMECHUNK](chunk) + this[WRITING] = false + + // return false if there's a queue, or if the current entry isn't flowing + const ret = + this[QUEUE].length ? false : + this[READENTRY] ? this[READENTRY].flowing : + true + + // if we have no queue, then that means a clogged READENTRY + if (!ret && !this[QUEUE].length) + this[READENTRY].once('drain', _ => this.emit('drain')) + + return ret + } + + [BUFFERCONCAT] (c) { + if (c && !this[ABORTED]) + this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c + } + + [MAYBEEND] () { + if (this[ENDED] && + !this[EMITTEDEND] && + !this[ABORTED] && + !this[CONSUMING]) { + this[EMITTEDEND] = true + const entry = this[WRITEENTRY] + if (entry && entry.blockRemain) { + const have = this[BUFFER] ? 
this[BUFFER].length : 0 + this.warn('Truncated input (needed ' + entry.blockRemain + + ' more bytes, only ' + have + ' available)', entry) + if (this[BUFFER]) + entry.write(this[BUFFER]) + entry.end() + } + this[EMIT](DONE) + } + } + + [CONSUMECHUNK] (chunk) { + if (this[CONSUMING]) { + this[BUFFERCONCAT](chunk) + } else if (!chunk && !this[BUFFER]) { + this[MAYBEEND]() + } else { + this[CONSUMING] = true + if (this[BUFFER]) { + this[BUFFERCONCAT](chunk) + const c = this[BUFFER] + this[BUFFER] = null + this[CONSUMECHUNKSUB](c) + } else { + this[CONSUMECHUNKSUB](chunk) + } + + while (this[BUFFER] && this[BUFFER].length >= 512 && !this[ABORTED]) { + const c = this[BUFFER] + this[BUFFER] = null + this[CONSUMECHUNKSUB](c) + } + this[CONSUMING] = false + } + + if (!this[BUFFER] || this[ENDED]) + this[MAYBEEND]() + } + + [CONSUMECHUNKSUB] (chunk) { + // we know that we are in CONSUMING mode, so anything written goes into + // the buffer. Advance the position and put any remainder in the buffer. + let position = 0 + let length = chunk.length + while (position + 512 <= length && !this[ABORTED]) { + switch (this[STATE]) { + case 'begin': + this[CONSUMEHEADER](chunk, position) + position += 512 + break + + case 'ignore': + case 'body': + position += this[CONSUMEBODY](chunk, position) + break + + case 'meta': + position += this[CONSUMEMETA](chunk, position) + break + + /* istanbul ignore next */ + default: + throw new Error('invalid state: ' + this[STATE]) + } + } + + if (position < length) { + if (this[BUFFER]) + this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]]) + else + this[BUFFER] = chunk.slice(position) + } + } + + end (chunk) { + if (!this[ABORTED]) { + if (this[UNZIP]) + this[UNZIP].end(chunk) + else { + this[ENDED] = true + this.write(chunk) + } + } + } +}) diff --git a/node_modules/tar/lib/pax.js b/node_modules/tar/lib/pax.js new file mode 100644 index 00000000..9d7e4aba --- /dev/null +++ b/node_modules/tar/lib/pax.js @@ -0,0 +1,146 @@ +'use strict' +const Buffer = require('./buffer.js') +const Header = require('./header.js') +const path = require('path') + +class Pax { + constructor (obj, global) { + this.atime = obj.atime || null + this.charset = obj.charset || null + this.comment = obj.comment || null + this.ctime = obj.ctime || null + this.gid = obj.gid || null + this.gname = obj.gname || null + this.linkpath = obj.linkpath || null + this.mtime = obj.mtime || null + this.path = obj.path || null + this.size = obj.size || null + this.uid = obj.uid || null + this.uname = obj.uname || null + this.dev = obj.dev || null + this.ino = obj.ino || null + this.nlink = obj.nlink || null + this.global = global || false + } + + encode () { + const body = this.encodeBody() + if (body === '') + return null + + const bodyLen = Buffer.byteLength(body) + // round up to 512 bytes + // add 512 for header + const bufLen = 512 * Math.ceil(1 + bodyLen / 512) + const buf = Buffer.allocUnsafe(bufLen) + + // 0-fill the header section, it might not hit every field + for (let i = 0; i < 512; i++) { + buf[i] = 0 + } + + new Header({ + // XXX split the path + // then the path should be PaxHeader + basename, but less than 99, + // prepend with the dirname + path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99), + mode: this.mode || 0o644, + uid: this.uid || null, + gid: this.gid || null, + size: bodyLen, + mtime: this.mtime || null, + type: this.global ? 
'GlobalExtendedHeader' : 'ExtendedHeader', + linkpath: '', + uname: this.uname || '', + gname: this.gname || '', + devmaj: 0, + devmin: 0, + atime: this.atime || null, + ctime: this.ctime || null + }).encode(buf) + + buf.write(body, 512, bodyLen, 'utf8') + + // null pad after the body + for (let i = bodyLen + 512; i < buf.length; i++) { + buf[i] = 0 + } + + return buf + } + + encodeBody () { + return ( + this.encodeField('path') + + this.encodeField('ctime') + + this.encodeField('atime') + + this.encodeField('dev') + + this.encodeField('ino') + + this.encodeField('nlink') + + this.encodeField('charset') + + this.encodeField('comment') + + this.encodeField('gid') + + this.encodeField('gname') + + this.encodeField('linkpath') + + this.encodeField('mtime') + + this.encodeField('size') + + this.encodeField('uid') + + this.encodeField('uname') + ) + } + + encodeField (field) { + if (this[field] === null || this[field] === undefined) + return '' + const v = this[field] instanceof Date ? this[field].getTime() / 1000 + : this[field] + const s = ' ' + + (field === 'dev' || field === 'ino' || field === 'nlink' + ? 'SCHILY.' : '') + + field + '=' + v + '\n' + const byteLen = Buffer.byteLength(s) + // the digits includes the length of the digits in ascii base-10 + // so if it's 9 characters, then adding 1 for the 9 makes it 10 + // which makes it 11 chars. + let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1 + if (byteLen + digits >= Math.pow(10, digits)) + digits += 1 + const len = digits + byteLen + return len + s + } +} + +Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g) + +const merge = (a, b) => + b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a + +const parseKV = string => + string + .replace(/\n$/, '') + .split('\n') + .reduce(parseKVLine, Object.create(null)) + +const parseKVLine = (set, line) => { + const n = parseInt(line, 10) + + // XXX Values with \n in them will fail this. + // Refactor to not be a naive line-by-line parse. + if (n !== Buffer.byteLength(line) + 1) + return set + + line = line.substr((n + ' ').length) + const kv = line.split('=') + const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1') + if (!k) + return set + + const v = kv.join('=') + set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) + ? new Date(v * 1000) + : /^[0-9]+$/.test(v) ? 
+v + : v + return set +} + +module.exports = Pax diff --git a/node_modules/tar/lib/read-entry.js b/node_modules/tar/lib/read-entry.js new file mode 100644 index 00000000..aa369c74 --- /dev/null +++ b/node_modules/tar/lib/read-entry.js @@ -0,0 +1,94 @@ +'use strict' +const types = require('./types.js') +const MiniPass = require('minipass') + +const SLURP = Symbol('slurp') +module.exports = class ReadEntry extends MiniPass { + constructor (header, ex, gex) { + super() + this.extended = ex + this.globalExtended = gex + this.header = header + this.startBlockSize = 512 * Math.ceil(header.size / 512) + this.blockRemain = this.startBlockSize + this.remain = header.size + this.type = header.type + this.meta = false + this.ignore = false + switch (this.type) { + case 'File': + case 'OldFile': + case 'Link': + case 'SymbolicLink': + case 'CharacterDevice': + case 'BlockDevice': + case 'Directory': + case 'FIFO': + case 'ContiguousFile': + case 'GNUDumpDir': + break + + case 'NextFileHasLongLinkpath': + case 'NextFileHasLongPath': + case 'OldGnuLongPath': + case 'GlobalExtendedHeader': + case 'ExtendedHeader': + case 'OldExtendedHeader': + this.meta = true + break + + // NOTE: gnutar and bsdtar treat unrecognized types as 'File' + // it may be worth doing the same, but with a warning. + default: + this.ignore = true + } + + this.path = header.path + this.mode = header.mode + if (this.mode) + this.mode = this.mode & 0o7777 + this.uid = header.uid + this.gid = header.gid + this.uname = header.uname + this.gname = header.gname + this.size = header.size + this.mtime = header.mtime + this.atime = header.atime + this.ctime = header.ctime + this.linkpath = header.linkpath + this.uname = header.uname + this.gname = header.gname + + if (ex) this[SLURP](ex) + if (gex) this[SLURP](gex, true) + } + + write (data) { + const writeLen = data.length + if (writeLen > this.blockRemain) + throw new Error('writing more to entry than is appropriate') + + const r = this.remain + const br = this.blockRemain + this.remain = Math.max(0, r - writeLen) + this.blockRemain = Math.max(0, br - writeLen) + if (this.ignore) + return true + + if (r >= writeLen) + return super.write(data) + + // r < writeLen + return super.write(data.slice(0, r)) + } + + [SLURP] (ex, global) { + for (let k in ex) { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. + if (ex[k] !== null && ex[k] !== undefined && + !(global && k === 'path')) + this[k] = ex[k] + } + } +} diff --git a/node_modules/tar/lib/replace.js b/node_modules/tar/lib/replace.js new file mode 100644 index 00000000..571cee94 --- /dev/null +++ b/node_modules/tar/lib/replace.js @@ -0,0 +1,220 @@ +'use strict' +const Buffer = require('./buffer.js') + +// tar -r +const hlo = require('./high-level-opt.js') +const Pack = require('./pack.js') +const Parse = require('./parse.js') +const fs = require('fs') +const fsm = require('fs-minipass') +const t = require('./list.js') +const path = require('path') + +// starting at the head of the file, read a Header +// If the checksum is invalid, that's our position to start writing +// If it is, jump forward by the specified size (round up to 512) +// and try again. +// Write the new Pack stream starting there. 
+ +const Header = require('./header.js') + +const r = module.exports = (opt_, files, cb) => { + const opt = hlo(opt_) + + if (!opt.file) + throw new TypeError('file is required') + + if (opt.gzip) + throw new TypeError('cannot append to compressed archives') + + if (!files || !Array.isArray(files) || !files.length) + throw new TypeError('no files or directories specified') + + files = Array.from(files) + + return opt.sync ? replaceSync(opt, files) + : replace(opt, files, cb) +} + +const replaceSync = (opt, files) => { + const p = new Pack.Sync(opt) + + let threw = true + let fd + let position + + try { + try { + fd = fs.openSync(opt.file, 'r+') + } catch (er) { + if (er.code === 'ENOENT') + fd = fs.openSync(opt.file, 'w+') + else + throw er + } + + const st = fs.fstatSync(fd) + const headBuf = Buffer.alloc(512) + + POSITION: for (position = 0; position < st.size; position += 512) { + for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) { + bytes = fs.readSync( + fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos + ) + + if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) + throw new Error('cannot append to compressed archives') + + if (!bytes) + break POSITION + } + + let h = new Header(headBuf) + if (!h.cksumValid) + break + let entryBlockSize = 512 * Math.ceil(h.size / 512) + if (position + entryBlockSize + 512 > st.size) + break + // the 512 for the header we just parsed will be added as well + // also jump ahead all the blocks for the body + position += entryBlockSize + if (opt.mtimeCache) + opt.mtimeCache.set(h.path, h.mtime) + } + threw = false + + streamSync(opt, p, position, fd, files) + } finally { + if (threw) + try { fs.closeSync(fd) } catch (er) {} + } +} + +const streamSync = (opt, p, position, fd, files) => { + const stream = new fsm.WriteStreamSync(opt.file, { + fd: fd, + start: position + }) + p.pipe(stream) + addFilesSync(p, files) +} + +const replace = (opt, files, cb) => { + files = Array.from(files) + const p = new Pack(opt) + + const getPos = (fd, size, cb_) => { + const cb = (er, pos) => { + if (er) + fs.close(fd, _ => cb_(er)) + else + cb_(null, pos) + } + + let position = 0 + if (size === 0) + return cb(null, 0) + + let bufPos = 0 + const headBuf = Buffer.alloc(512) + const onread = (er, bytes) => { + if (er) + return cb(er) + bufPos += bytes + if (bufPos < 512 && bytes) + return fs.read( + fd, headBuf, bufPos, headBuf.length - bufPos, + position + bufPos, onread + ) + + if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) + return cb(new Error('cannot append to compressed archives')) + + // truncated header + if (bufPos < 512) + return cb(null, position) + + const h = new Header(headBuf) + if (!h.cksumValid) + return cb(null, position) + + const entryBlockSize = 512 * Math.ceil(h.size / 512) + if (position + entryBlockSize + 512 > size) + return cb(null, position) + + position += entryBlockSize + 512 + if (position >= size) + return cb(null, position) + + if (opt.mtimeCache) + opt.mtimeCache.set(h.path, h.mtime) + bufPos = 0 + fs.read(fd, headBuf, 0, 512, position, onread) + } + fs.read(fd, headBuf, 0, 512, position, onread) + } + + const promise = new Promise((resolve, reject) => { + p.on('error', reject) + let flag = 'r+' + const onopen = (er, fd) => { + if (er && er.code === 'ENOENT' && flag === 'r+') { + flag = 'w+' + return fs.open(opt.file, flag, onopen) + } + + if (er) + return reject(er) + + fs.fstat(fd, (er, st) => { + if (er) + return reject(er) + getPos(fd, st.size, (er, position) => { + if (er) + return 
reject(er) + const stream = new fsm.WriteStream(opt.file, { + fd: fd, + start: position + }) + p.pipe(stream) + stream.on('error', reject) + stream.on('close', resolve) + addFilesAsync(p, files) + }) + }) + } + fs.open(opt.file, flag, onopen) + }) + + return cb ? promise.then(cb, cb) : promise +} + +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') + t({ + file: path.resolve(p.cwd, file.substr(1)), + sync: true, + noResume: true, + onentry: entry => p.add(entry) + }) + else + p.add(file) + }) + p.end() +} + +const addFilesAsync = (p, files) => { + while (files.length) { + const file = files.shift() + if (file.charAt(0) === '@') + return t({ + file: path.resolve(p.cwd, file.substr(1)), + noResume: true, + onentry: entry => p.add(entry) + }).then(_ => addFilesAsync(p, files)) + else + p.add(file) + } + p.end() +} diff --git a/node_modules/tar/lib/types.js b/node_modules/tar/lib/types.js new file mode 100644 index 00000000..df425652 --- /dev/null +++ b/node_modules/tar/lib/types.js @@ -0,0 +1,44 @@ +'use strict' +// map types from key to human-friendly name +exports.name = new Map([ + ['0', 'File'], + // same as File + ['', 'OldFile'], + ['1', 'Link'], + ['2', 'SymbolicLink'], + // Devices and FIFOs aren't fully supported + // they are parsed, but skipped when unpacking + ['3', 'CharacterDevice'], + ['4', 'BlockDevice'], + ['5', 'Directory'], + ['6', 'FIFO'], + // same as File + ['7', 'ContiguousFile'], + // pax headers + ['g', 'GlobalExtendedHeader'], + ['x', 'ExtendedHeader'], + // vendor-specific stuff + // skip + ['A', 'SolarisACL'], + // like 5, but with data, which should be skipped + ['D', 'GNUDumpDir'], + // metadata only, skip + ['I', 'Inode'], + // data = link path of next file + ['K', 'NextFileHasLongLinkpath'], + // data = path of next file + ['L', 'NextFileHasLongPath'], + // skip + ['M', 'ContinuationFile'], + // like L + ['N', 'OldGnuLongPath'], + // skip + ['S', 'SparseFile'], + // skip + ['V', 'TapeVolumeHeader'], + // like x + ['X', 'OldExtendedHeader'] +]) + +// map the other direction +exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]])) diff --git a/node_modules/tar/lib/unpack.js b/node_modules/tar/lib/unpack.js new file mode 100644 index 00000000..fc765096 --- /dev/null +++ b/node_modules/tar/lib/unpack.js @@ -0,0 +1,621 @@ +'use strict' + +const assert = require('assert') +const EE = require('events').EventEmitter +const Parser = require('./parse.js') +const fs = require('fs') +const fsm = require('fs-minipass') +const path = require('path') +const mkdir = require('./mkdir.js') +const mkdirSync = mkdir.sync +const wc = require('./winchars.js') + +const ONENTRY = Symbol('onEntry') +const CHECKFS = Symbol('checkFs') +const ISREUSABLE = Symbol('isReusable') +const MAKEFS = Symbol('makeFs') +const FILE = Symbol('file') +const DIRECTORY = Symbol('directory') +const LINK = Symbol('link') +const SYMLINK = Symbol('symlink') +const HARDLINK = Symbol('hardlink') +const UNSUPPORTED = Symbol('unsupported') +const UNKNOWN = Symbol('unknown') +const CHECKPATH = Symbol('checkPath') +const MKDIR = Symbol('mkdir') +const ONERROR = Symbol('onError') +const PENDING = Symbol('pending') +const PEND = Symbol('pend') +const UNPEND = Symbol('unpend') +const ENDED = Symbol('ended') +const MAYBECLOSE = Symbol('maybeClose') +const SKIP = Symbol('skip') +const DOCHOWN = Symbol('doChown') +const UID = Symbol('uid') +const GID = Symbol('gid') +const crypto = require('crypto') + +// Unlinks on Windows are not atomic. 
+// +// This means that if you have a file entry, followed by another +// file entry with an identical name, and you cannot re-use the file +// (because it's a hardlink, or because unlink:true is set, or it's +// Windows, which does not have useful nlink values), then the unlink +// will be committed to the disk AFTER the new file has been written +// over the old one, deleting the new file. +// +// To work around this, on Windows systems, we rename the file and then +// delete the renamed file. It's a sloppy kludge, but frankly, I do not +// know of a better way to do this, given windows' non-atomic unlink +// semantics. +// +// See: https://github.com/npm/node-tar/issues/183 +/* istanbul ignore next */ +const unlinkFile = (path, cb) => { + if (process.platform !== 'win32') + return fs.unlink(path, cb) + + const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') + fs.rename(path, name, er => { + if (er) + return cb(er) + fs.unlink(name, cb) + }) +} + +/* istanbul ignore next */ +const unlinkFileSync = path => { + if (process.platform !== 'win32') + return fs.unlinkSync(path) + + const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') + fs.renameSync(path, name) + fs.unlinkSync(name) +} + +// this.gid, entry.gid, this.processUid +const uint32 = (a, b, c) => + a === a >>> 0 ? a + : b === b >>> 0 ? b + : c + +class Unpack extends Parser { + constructor (opt) { + if (!opt) + opt = {} + + opt.ondone = _ => { + this[ENDED] = true + this[MAYBECLOSE]() + } + + super(opt) + + this.transform = typeof opt.transform === 'function' ? opt.transform : null + + this.writable = true + this.readable = false + + this[PENDING] = 0 + this[ENDED] = false + + this.dirCache = opt.dirCache || new Map() + + if (typeof opt.uid === 'number' || typeof opt.gid === 'number') { + // need both or neither + if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') + throw new TypeError('cannot set owner without number uid and gid') + if (opt.preserveOwner) + throw new TypeError( + 'cannot preserve owner in archive and also set owner explicitly') + this.uid = opt.uid + this.gid = opt.gid + this.setOwner = true + } else { + this.uid = null + this.gid = null + this.setOwner = false + } + + // default true for root + if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') + this.preserveOwner = process.getuid && process.getuid() === 0 + else + this.preserveOwner = !!opt.preserveOwner + + this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ? + process.getuid() : null + this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ? + process.getgid() : null + + // mostly just for testing, but useful in some cases. 
+ // Forcibly trigger a chown on every entry, no matter what + this.forceChown = opt.forceChown === true + + // turn > this[ONENTRY](entry)) + } + + [MAYBECLOSE] () { + if (this[ENDED] && this[PENDING] === 0) { + this.emit('prefinish') + this.emit('finish') + this.emit('end') + this.emit('close') + } + } + + [CHECKPATH] (entry) { + if (this.strip) { + const parts = entry.path.split(/\/|\\/) + if (parts.length < this.strip) + return false + entry.path = parts.slice(this.strip).join('/') + + if (entry.type === 'Link') { + const linkparts = entry.linkpath.split(/\/|\\/) + if (linkparts.length >= this.strip) + entry.linkpath = linkparts.slice(this.strip).join('/') + } + } + + if (!this.preservePaths) { + const p = entry.path + if (p.match(/(^|\/|\\)\.\.(\\|\/|$)/)) { + this.warn('path contains \'..\'', p) + return false + } + + // absolutes on posix are also absolutes on win32 + // so we only need to test this one to get both + if (path.win32.isAbsolute(p)) { + const parsed = path.win32.parse(p) + this.warn('stripping ' + parsed.root + ' from absolute path', p) + entry.path = p.substr(parsed.root.length) + } + } + + // only encode : chars that aren't drive letter indicators + if (this.win32) { + const parsed = path.win32.parse(entry.path) + entry.path = parsed.root === '' ? wc.encode(entry.path) + : parsed.root + wc.encode(entry.path.substr(parsed.root.length)) + } + + if (path.isAbsolute(entry.path)) + entry.absolute = entry.path + else + entry.absolute = path.resolve(this.cwd, entry.path) + + return true + } + + [ONENTRY] (entry) { + if (!this[CHECKPATH](entry)) + return entry.resume() + + assert.equal(typeof entry.absolute, 'string') + + switch (entry.type) { + case 'Directory': + case 'GNUDumpDir': + if (entry.mode) + entry.mode = entry.mode | 0o700 + + case 'File': + case 'OldFile': + case 'ContiguousFile': + case 'Link': + case 'SymbolicLink': + return this[CHECKFS](entry) + + case 'CharacterDevice': + case 'BlockDevice': + case 'FIFO': + return this[UNSUPPORTED](entry) + } + } + + [ONERROR] (er, entry) { + // Cwd has to exist, or else nothing works. That's serious. + // Other errors are warnings, which raise the error in strict + // mode, but otherwise continue on. 
+ if (er.name === 'CwdError') + this.emit('error', er) + else { + this.warn(er.message, er) + this[UNPEND]() + entry.resume() + } + } + + [MKDIR] (dir, mode, cb) { + mkdir(dir, { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode + }, cb) + } + + [DOCHOWN] (entry) { + // in preserve owner mode, chown if the entry doesn't match process + // in set owner mode, chown if setting doesn't match process + return this.forceChown || + this.preserveOwner && + ( typeof entry.uid === 'number' && entry.uid !== this.processUid || + typeof entry.gid === 'number' && entry.gid !== this.processGid ) + || + ( typeof this.uid === 'number' && this.uid !== this.processUid || + typeof this.gid === 'number' && this.gid !== this.processGid ) + } + + [UID] (entry) { + return uint32(this.uid, entry.uid, this.processUid) + } + + [GID] (entry) { + return uint32(this.gid, entry.gid, this.processGid) + } + + [FILE] (entry) { + const mode = entry.mode & 0o7777 || this.fmode + const stream = new fsm.WriteStream(entry.absolute, { + mode: mode, + autoClose: false + }) + stream.on('error', er => this[ONERROR](er, entry)) + + let actions = 1 + const done = er => { + if (er) + return this[ONERROR](er, entry) + + if (--actions === 0) + fs.close(stream.fd, _ => this[UNPEND]()) + } + + stream.on('finish', _ => { + // if futimes fails, try utimes + // if utimes fails, fail with the original error + // same for fchown/chown + const abs = entry.absolute + const fd = stream.fd + + if (entry.mtime && !this.noMtime) { + actions++ + const atime = entry.atime || new Date() + const mtime = entry.mtime + fs.futimes(fd, atime, mtime, er => + er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er)) + : done()) + } + + if (this[DOCHOWN](entry)) { + actions++ + const uid = this[UID](entry) + const gid = this[GID](entry) + fs.fchown(fd, uid, gid, er => + er ? fs.chown(abs, uid, gid, er2 => done(er2 && er)) + : done()) + } + + done() + }) + + const tx = this.transform ? 
this.transform(entry) || entry : entry + if (tx !== entry) { + tx.on('error', er => this[ONERROR](er, entry)) + entry.pipe(tx) + } + tx.pipe(stream) + } + + [DIRECTORY] (entry) { + const mode = entry.mode & 0o7777 || this.dmode + this[MKDIR](entry.absolute, mode, er => { + if (er) + return this[ONERROR](er, entry) + + let actions = 1 + const done = _ => { + if (--actions === 0) { + this[UNPEND]() + entry.resume() + } + } + + if (entry.mtime && !this.noMtime) { + actions++ + fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done) + } + + if (this[DOCHOWN](entry)) { + actions++ + fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done) + } + + done() + }) + } + + [UNSUPPORTED] (entry) { + this.warn('unsupported entry type: ' + entry.type, entry) + entry.resume() + } + + [SYMLINK] (entry) { + this[LINK](entry, entry.linkpath, 'symlink') + } + + [HARDLINK] (entry) { + this[LINK](entry, path.resolve(this.cwd, entry.linkpath), 'link') + } + + [PEND] () { + this[PENDING]++ + } + + [UNPEND] () { + this[PENDING]-- + this[MAYBECLOSE]() + } + + [SKIP] (entry) { + this[UNPEND]() + entry.resume() + } + + // Check if we can reuse an existing filesystem entry safely and + // overwrite it, rather than unlinking and recreating + // Windows doesn't report a useful nlink, so we just never reuse entries + [ISREUSABLE] (entry, st) { + return entry.type === 'File' && + !this.unlink && + st.isFile() && + st.nlink <= 1 && + process.platform !== 'win32' + } + + // check if a thing is there, and if so, try to clobber it + [CHECKFS] (entry) { + this[PEND]() + this[MKDIR](path.dirname(entry.absolute), this.dmode, er => { + if (er) + return this[ONERROR](er, entry) + fs.lstat(entry.absolute, (er, st) => { + if (st && (this.keep || this.newer && st.mtime > entry.mtime)) + this[SKIP](entry) + else if (er || this[ISREUSABLE](entry, st)) + this[MAKEFS](null, entry) + else if (st.isDirectory()) { + if (entry.type === 'Directory') { + if (!entry.mode || (st.mode & 0o7777) === entry.mode) + this[MAKEFS](null, entry) + else + fs.chmod(entry.absolute, entry.mode, er => this[MAKEFS](er, entry)) + } else + fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry)) + } else + unlinkFile(entry.absolute, er => this[MAKEFS](er, entry)) + }) + }) + } + + [MAKEFS] (er, entry) { + if (er) + return this[ONERROR](er, entry) + + switch (entry.type) { + case 'File': + case 'OldFile': + case 'ContiguousFile': + return this[FILE](entry) + + case 'Link': + return this[HARDLINK](entry) + + case 'SymbolicLink': + return this[SYMLINK](entry) + + case 'Directory': + case 'GNUDumpDir': + return this[DIRECTORY](entry) + } + } + + [LINK] (entry, linkpath, link) { + // XXX: get the type ('file' or 'dir') for windows + fs[link](linkpath, entry.absolute, er => { + if (er) + return this[ONERROR](er, entry) + this[UNPEND]() + entry.resume() + }) + } +} + +class UnpackSync extends Unpack { + constructor (opt) { + super(opt) + } + + [CHECKFS] (entry) { + const er = this[MKDIR](path.dirname(entry.absolute), this.dmode) + if (er) + return this[ONERROR](er, entry) + try { + const st = fs.lstatSync(entry.absolute) + if (this.keep || this.newer && st.mtime > entry.mtime) + return this[SKIP](entry) + else if (this[ISREUSABLE](entry, st)) + return this[MAKEFS](null, entry) + else { + try { + if (st.isDirectory()) { + if (entry.type === 'Directory') { + if (entry.mode && (st.mode & 0o7777) !== entry.mode) + fs.chmodSync(entry.absolute, entry.mode) + } else + fs.rmdirSync(entry.absolute) + } else + unlinkFileSync(entry.absolute) + return 
this[MAKEFS](null, entry) + } catch (er) { + return this[ONERROR](er, entry) + } + } + } catch (er) { + return this[MAKEFS](null, entry) + } + } + + [FILE] (entry) { + const mode = entry.mode & 0o7777 || this.fmode + + const oner = er => { + try { fs.closeSync(fd) } catch (_) {} + if (er) + this[ONERROR](er, entry) + } + + let stream + let fd + try { + fd = fs.openSync(entry.absolute, 'w', mode) + } catch (er) { + return oner(er) + } + const tx = this.transform ? this.transform(entry) || entry : entry + if (tx !== entry) { + tx.on('error', er => this[ONERROR](er, entry)) + entry.pipe(tx) + } + + tx.on('data', chunk => { + try { + fs.writeSync(fd, chunk, 0, chunk.length) + } catch (er) { + oner(er) + } + }) + + tx.on('end', _ => { + let er = null + // try both, falling futimes back to utimes + // if either fails, handle the first error + if (entry.mtime && !this.noMtime) { + const atime = entry.atime || new Date() + const mtime = entry.mtime + try { + fs.futimesSync(fd, atime, mtime) + } catch (futimeser) { + try { + fs.utimesSync(entry.absolute, atime, mtime) + } catch (utimeser) { + er = futimeser + } + } + } + + if (this[DOCHOWN](entry)) { + const uid = this[UID](entry) + const gid = this[GID](entry) + + try { + fs.fchownSync(fd, uid, gid) + } catch (fchowner) { + try { + fs.chownSync(entry.absolute, uid, gid) + } catch (chowner) { + er = er || fchowner + } + } + } + + oner(er) + }) + } + + [DIRECTORY] (entry) { + const mode = entry.mode & 0o7777 || this.dmode + const er = this[MKDIR](entry.absolute, mode) + if (er) + return this[ONERROR](er, entry) + if (entry.mtime && !this.noMtime) { + try { + fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime) + } catch (er) {} + } + if (this[DOCHOWN](entry)) { + try { + fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry)) + } catch (er) {} + } + entry.resume() + } + + [MKDIR] (dir, mode) { + try { + return mkdir.sync(dir, { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode + }) + } catch (er) { + return er + } + } + + [LINK] (entry, linkpath, link) { + try { + fs[link + 'Sync'](linkpath, entry.absolute) + entry.resume() + } catch (er) { + return this[ONERROR](er, entry) + } + } +} + +Unpack.Sync = UnpackSync +module.exports = Unpack diff --git a/node_modules/tar/lib/update.js b/node_modules/tar/lib/update.js new file mode 100644 index 00000000..16c3e93e --- /dev/null +++ b/node_modules/tar/lib/update.js @@ -0,0 +1,36 @@ +'use strict' + +// tar -u + +const hlo = require('./high-level-opt.js') +const r = require('./replace.js') +// just call tar.r with the filter and mtimeCache + +const u = module.exports = (opt_, files, cb) => { + const opt = hlo(opt_) + + if (!opt.file) + throw new TypeError('file is required') + + if (opt.gzip) + throw new TypeError('cannot append to compressed archives') + + if (!files || !Array.isArray(files) || !files.length) + throw new TypeError('no files or directories specified') + + files = Array.from(files) + + mtimeFilter(opt) + return r(opt, files, cb) +} + +const mtimeFilter = opt => { + const filter = opt.filter + + if (!opt.mtimeCache) + opt.mtimeCache = new Map() + + opt.filter = filter ? 
(path, stat) => + filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime) + : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime) +} diff --git a/node_modules/tar/lib/warn-mixin.js b/node_modules/tar/lib/warn-mixin.js new file mode 100644 index 00000000..94a4b9b9 --- /dev/null +++ b/node_modules/tar/lib/warn-mixin.js @@ -0,0 +1,14 @@ +'use strict' +module.exports = Base => class extends Base { + warn (msg, data) { + if (!this.strict) + this.emit('warn', msg, data) + else if (data instanceof Error) + this.emit('error', data) + else { + const er = new Error(msg) + er.data = data + this.emit('error', er) + } + } +} diff --git a/node_modules/tar/lib/winchars.js b/node_modules/tar/lib/winchars.js new file mode 100644 index 00000000..cf6ea060 --- /dev/null +++ b/node_modules/tar/lib/winchars.js @@ -0,0 +1,23 @@ +'use strict' + +// When writing files on Windows, translate the characters to their +// 0xf000 higher-encoded versions. + +const raw = [ + '|', + '<', + '>', + '?', + ':' +] + +const win = raw.map(char => + String.fromCharCode(0xf000 + char.charCodeAt(0))) + +const toWin = new Map(raw.map((char, i) => [char, win[i]])) +const toRaw = new Map(win.map((char, i) => [char, raw[i]])) + +module.exports = { + encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s), + decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s) +} diff --git a/node_modules/tar/lib/write-entry.js b/node_modules/tar/lib/write-entry.js new file mode 100644 index 00000000..63f74948 --- /dev/null +++ b/node_modules/tar/lib/write-entry.js @@ -0,0 +1,422 @@ +'use strict' +const Buffer = require('./buffer.js') +const MiniPass = require('minipass') +const Pax = require('./pax.js') +const Header = require('./header.js') +const ReadEntry = require('./read-entry.js') +const fs = require('fs') +const path = require('path') + +const types = require('./types.js') +const maxReadSize = 16 * 1024 * 1024 +const PROCESS = Symbol('process') +const FILE = Symbol('file') +const DIRECTORY = Symbol('directory') +const SYMLINK = Symbol('symlink') +const HARDLINK = Symbol('hardlink') +const HEADER = Symbol('header') +const READ = Symbol('read') +const LSTAT = Symbol('lstat') +const ONLSTAT = Symbol('onlstat') +const ONREAD = Symbol('onread') +const ONREADLINK = Symbol('onreadlink') +const OPENFILE = Symbol('openfile') +const ONOPENFILE = Symbol('onopenfile') +const CLOSE = Symbol('close') +const MODE = Symbol('mode') +const warner = require('./warn-mixin.js') +const winchars = require('./winchars.js') + +const modeFix = require('./mode-fix.js') + +const WriteEntry = warner(class WriteEntry extends MiniPass { + constructor (p, opt) { + opt = opt || {} + super(opt) + if (typeof p !== 'string') + throw new TypeError('path is required') + this.path = p + // suppress atime, ctime, uid, gid, uname, gname + this.portable = !!opt.portable + // until node has builtin pwnam functions, this'll have to do + this.myuid = process.getuid && process.getuid() + this.myuser = process.env.USER || '' + this.maxReadSize = opt.maxReadSize || maxReadSize + this.linkCache = opt.linkCache || new Map() + this.statCache = opt.statCache || new Map() + this.preservePaths = !!opt.preservePaths + this.cwd = opt.cwd || process.cwd() + this.strict = !!opt.strict + this.noPax = !!opt.noPax + this.noMtime = !!opt.noMtime + this.mtime = opt.mtime || null + + if (typeof opt.onwarn === 'function') + this.on('warn', opt.onwarn) + + if (!this.preservePaths && path.win32.isAbsolute(p)) { + // absolutes on posix are also absolutes on win32 + // so we 
only need to test this one to get both + const parsed = path.win32.parse(p) + this.warn('stripping ' + parsed.root + ' from absolute path', p) + this.path = p.substr(parsed.root.length) + } + + this.win32 = !!opt.win32 || process.platform === 'win32' + if (this.win32) { + this.path = winchars.decode(this.path.replace(/\\/g, '/')) + p = p.replace(/\\/g, '/') + } + + this.absolute = opt.absolute || path.resolve(this.cwd, p) + + if (this.path === '') + this.path = './' + + if (this.statCache.has(this.absolute)) + this[ONLSTAT](this.statCache.get(this.absolute)) + else + this[LSTAT]() + } + + [LSTAT] () { + fs.lstat(this.absolute, (er, stat) => { + if (er) + return this.emit('error', er) + this[ONLSTAT](stat) + }) + } + + [ONLSTAT] (stat) { + this.statCache.set(this.absolute, stat) + this.stat = stat + if (!stat.isFile()) + stat.size = 0 + this.type = getType(stat) + this.emit('stat', stat) + this[PROCESS]() + } + + [PROCESS] () { + switch (this.type) { + case 'File': return this[FILE]() + case 'Directory': return this[DIRECTORY]() + case 'SymbolicLink': return this[SYMLINK]() + // unsupported types are ignored. + default: return this.end() + } + } + + [MODE] (mode) { + return modeFix(mode, this.type === 'Directory') + } + + [HEADER] () { + if (this.type === 'Directory' && this.portable) + this.noMtime = true + + this.header = new Header({ + path: this.path, + linkpath: this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this[MODE](this.stat.mode), + uid: this.portable ? null : this.stat.uid, + gid: this.portable ? null : this.stat.gid, + size: this.stat.size, + mtime: this.noMtime ? null : this.mtime || this.stat.mtime, + type: this.type, + uname: this.portable ? null : + this.stat.uid === this.myuid ? this.myuser : '', + atime: this.portable ? null : this.stat.atime, + ctime: this.portable ? null : this.stat.ctime + }) + + if (this.header.encode() && !this.noPax) + this.write(new Pax({ + atime: this.portable ? null : this.header.atime, + ctime: this.portable ? null : this.header.ctime, + gid: this.portable ? null : this.header.gid, + mtime: this.noMtime ? null : this.mtime || this.header.mtime, + path: this.path, + linkpath: this.linkpath, + size: this.header.size, + uid: this.portable ? null : this.header.uid, + uname: this.portable ? null : this.header.uname, + dev: this.portable ? null : this.stat.dev, + ino: this.portable ? null : this.stat.ino, + nlink: this.portable ? 
null : this.stat.nlink + }).encode()) + this.write(this.header.block) + } + + [DIRECTORY] () { + if (this.path.substr(-1) !== '/') + this.path += '/' + this.stat.size = 0 + this[HEADER]() + this.end() + } + + [SYMLINK] () { + fs.readlink(this.absolute, (er, linkpath) => { + if (er) + return this.emit('error', er) + this[ONREADLINK](linkpath) + }) + } + + [ONREADLINK] (linkpath) { + this.linkpath = linkpath + this[HEADER]() + this.end() + } + + [HARDLINK] (linkpath) { + this.type = 'Link' + this.linkpath = path.relative(this.cwd, linkpath) + this.stat.size = 0 + this[HEADER]() + this.end() + } + + [FILE] () { + if (this.stat.nlink > 1) { + const linkKey = this.stat.dev + ':' + this.stat.ino + if (this.linkCache.has(linkKey)) { + const linkpath = this.linkCache.get(linkKey) + if (linkpath.indexOf(this.cwd) === 0) + return this[HARDLINK](linkpath) + } + this.linkCache.set(linkKey, this.absolute) + } + + this[HEADER]() + if (this.stat.size === 0) + return this.end() + + this[OPENFILE]() + } + + [OPENFILE] () { + fs.open(this.absolute, 'r', (er, fd) => { + if (er) + return this.emit('error', er) + this[ONOPENFILE](fd) + }) + } + + [ONOPENFILE] (fd) { + const blockLen = 512 * Math.ceil(this.stat.size / 512) + const bufLen = Math.min(blockLen, this.maxReadSize) + const buf = Buffer.allocUnsafe(bufLen) + this[READ](fd, buf, 0, buf.length, 0, this.stat.size, blockLen) + } + + [READ] (fd, buf, offset, length, pos, remain, blockRemain) { + fs.read(fd, buf, offset, length, pos, (er, bytesRead) => { + if (er) + return this[CLOSE](fd, _ => this.emit('error', er)) + this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead) + }) + } + + [CLOSE] (fd, cb) { + fs.close(fd, cb) + } + + [ONREAD] (fd, buf, offset, length, pos, remain, blockRemain, bytesRead) { + if (bytesRead <= 0 && remain > 0) { + const er = new Error('encountered unexpected EOF') + er.path = this.absolute + er.syscall = 'read' + er.code = 'EOF' + this[CLOSE](fd) + return this.emit('error', er) + } + + if (bytesRead > remain) { + const er = new Error('did not encounter expected EOF') + er.path = this.absolute + er.syscall = 'read' + er.code = 'EOF' + this[CLOSE](fd) + return this.emit('error', er) + } + + // null out the rest of the buffer, if we could fit the block padding + if (bytesRead === remain) { + for (let i = bytesRead; i < length && bytesRead < blockRemain; i++) { + buf[i + offset] = 0 + bytesRead ++ + remain ++ + } + } + + const writeBuf = offset === 0 && bytesRead === buf.length ? 
+ buf : buf.slice(offset, offset + bytesRead) + remain -= bytesRead + blockRemain -= bytesRead + pos += bytesRead + offset += bytesRead + + this.write(writeBuf) + + if (!remain) { + if (blockRemain) + this.write(Buffer.alloc(blockRemain)) + this.end() + this[CLOSE](fd, _ => _) + return + } + + if (offset >= length) { + buf = Buffer.allocUnsafe(length) + offset = 0 + } + length = buf.length - offset + this[READ](fd, buf, offset, length, pos, remain, blockRemain) + } +}) + +class WriteEntrySync extends WriteEntry { + constructor (path, opt) { + super(path, opt) + } + + [LSTAT] () { + this[ONLSTAT](fs.lstatSync(this.absolute)) + } + + [SYMLINK] () { + this[ONREADLINK](fs.readlinkSync(this.absolute)) + } + + [OPENFILE] () { + this[ONOPENFILE](fs.openSync(this.absolute, 'r')) + } + + [READ] (fd, buf, offset, length, pos, remain, blockRemain) { + let threw = true + try { + const bytesRead = fs.readSync(fd, buf, offset, length, pos) + this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead) + threw = false + } finally { + if (threw) + try { this[CLOSE](fd) } catch (er) {} + } + } + + [CLOSE] (fd) { + fs.closeSync(fd) + } +} + +const WriteEntryTar = warner(class WriteEntryTar extends MiniPass { + constructor (readEntry, opt) { + opt = opt || {} + super(opt) + this.preservePaths = !!opt.preservePaths + this.portable = !!opt.portable + this.strict = !!opt.strict + this.noPax = !!opt.noPax + this.noMtime = !!opt.noMtime + + this.readEntry = readEntry + this.type = readEntry.type + if (this.type === 'Directory' && this.portable) + this.noMtime = true + + this.path = readEntry.path + this.mode = this[MODE](readEntry.mode) + this.uid = this.portable ? null : readEntry.uid + this.gid = this.portable ? null : readEntry.gid + this.uname = this.portable ? null : readEntry.uname + this.gname = this.portable ? null : readEntry.gname + this.size = readEntry.size + this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime + this.atime = this.portable ? null : readEntry.atime + this.ctime = this.portable ? null : readEntry.ctime + this.linkpath = readEntry.linkpath + + if (typeof opt.onwarn === 'function') + this.on('warn', opt.onwarn) + + if (path.isAbsolute(this.path) && !this.preservePaths) { + const parsed = path.parse(this.path) + this.warn( + 'stripping ' + parsed.root + ' from absolute path', + this.path + ) + this.path = this.path.substr(parsed.root.length) + } + + this.remain = readEntry.size + this.blockRemain = readEntry.startBlockSize + + this.header = new Header({ + path: this.path, + linkpath: this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this.mode, + uid: this.portable ? null : this.uid, + gid: this.portable ? null : this.gid, + size: this.size, + mtime: this.noMtime ? null : this.mtime, + type: this.type, + uname: this.portable ? null : this.uname, + atime: this.portable ? null : this.atime, + ctime: this.portable ? null : this.ctime + }) + + if (this.header.encode() && !this.noPax) + super.write(new Pax({ + atime: this.portable ? null : this.atime, + ctime: this.portable ? null : this.ctime, + gid: this.portable ? null : this.gid, + mtime: this.noMtime ? null : this.mtime, + path: this.path, + linkpath: this.linkpath, + size: this.size, + uid: this.portable ? null : this.uid, + uname: this.portable ? null : this.uname, + dev: this.portable ? null : this.readEntry.dev, + ino: this.portable ? null : this.readEntry.ino, + nlink: this.portable ? 
null : this.readEntry.nlink + }).encode()) + + super.write(this.header.block) + readEntry.pipe(this) + } + + [MODE] (mode) { + return modeFix(mode, this.type === 'Directory') + } + + write (data) { + const writeLen = data.length + if (writeLen > this.blockRemain) + throw new Error('writing more to entry than is appropriate') + this.blockRemain -= writeLen + return super.write(data) + } + + end () { + if (this.blockRemain) + this.write(Buffer.alloc(this.blockRemain)) + return super.end() + } +}) + +WriteEntry.Sync = WriteEntrySync +WriteEntry.Tar = WriteEntryTar + +const getType = stat => + stat.isFile() ? 'File' + : stat.isDirectory() ? 'Directory' + : stat.isSymbolicLink() ? 'SymbolicLink' + : 'Unsupported' + +module.exports = WriteEntry diff --git a/node_modules/tar/node_modules/.bin/mkdirp b/node_modules/tar/node_modules/.bin/mkdirp new file mode 120000 index 00000000..91a5f623 --- /dev/null +++ b/node_modules/tar/node_modules/.bin/mkdirp @@ -0,0 +1 @@ +../../../mkdirp/bin/cmd.js \ No newline at end of file diff --git a/node_modules/tar/node_modules/safe-buffer/LICENSE b/node_modules/tar/node_modules/safe-buffer/LICENSE new file mode 100644 index 00000000..0c068cee --- /dev/null +++ b/node_modules/tar/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/tar/node_modules/safe-buffer/README.md b/node_modules/tar/node_modules/safe-buffer/README.md new file mode 100644 index 00000000..356e3519 --- /dev/null +++ b/node_modules/tar/node_modules/safe-buffer/README.md @@ -0,0 +1,586 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +[Get supported safe-buffer with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-safe-buffer?utm_source=npm-safe-buffer&utm_medium=referral&utm_campaign=readme) + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. 
+ +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. 
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. 
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. 
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. 
But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/tar/node_modules/safe-buffer/index.d.ts b/node_modules/tar/node_modules/safe-buffer/index.d.ts new file mode 100644 index 00000000..e9fed809 --- /dev/null +++ b/node_modules/tar/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: 
number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. 
+ * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/tar/node_modules/safe-buffer/index.js b/node_modules/tar/node_modules/safe-buffer/index.js new file mode 100644 index 00000000..054c8d30 --- /dev/null +++ b/node_modules/tar/node_modules/safe-buffer/index.js @@ -0,0 +1,64 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/tar/node_modules/safe-buffer/package.json b/node_modules/tar/node_modules/safe-buffer/package.json new file mode 100644 index 
00000000..d532dafb --- /dev/null +++ b/node_modules/tar/node_modules/safe-buffer/package.json @@ -0,0 +1,37 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.2.0", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json new file mode 100644 index 00000000..d70e2664 --- /dev/null +++ b/node_modules/tar/package.json @@ -0,0 +1,49 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "tar", + "description": "tar for node", + "version": "4.4.10", + "repository": { + "type": "git", + "url": "https://github.com/npm/node-tar.git" + }, + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "genparse": "node scripts/generate-parse-fixtures.js", + "bench": "for i in benchmarks/*/*.js; do echo $i; for j in {1..5}; do node $i || break; done; done" + }, + "dependencies": { + "chownr": "^1.1.1", + "fs-minipass": "^1.2.5", + "minipass": "^2.3.5", + "minizlib": "^1.2.1", + "mkdirp": "^0.5.0", + "safe-buffer": "^5.1.2", + "yallist": "^3.0.3" + }, + "devDependencies": { + "chmodr": "^1.2.0", + "end-of-stream": "^1.4.1", + "events-to-array": "^1.1.2", + "mutate-fs": "^2.1.1", + "rimraf": "^2.6.3", + "tap": "^14.2.0", + "tar-fs": "^1.16.3", + "tar-stream": "^1.6.2" + }, + "license": "ISC", + "engines": { + "node": ">=4.5" + }, + "files": [ + "index.js", + "lib/" + ], + "tap": { + "coverage-map": "map.js", + "check-coverage": true + } +} diff --git a/node_modules/term-size/index.js b/node_modules/term-size/index.js new file mode 100644 index 00000000..95e410df --- /dev/null +++ b/node_modules/term-size/index.js @@ -0,0 +1,70 @@ +'use strict'; +const path = require('path'); +const execa = require('execa'); + +const create = (columns, rows) => ({ + columns: parseInt(columns, 10), + rows: parseInt(rows, 10) +}); + +module.exports = () => { + const env = process.env; + const stdout = process.stdout; + const stderr = process.stderr; + + if (stdout && stdout.columns && stdout.rows) { + return create(stdout.columns, stdout.rows); + } + + if (stderr && stderr.columns && stderr.rows) { + return create(stderr.columns, stderr.rows); + } + + // These values are static, so not the first choice + if (env.COLUMNS && env.LINES) { + return create(env.COLUMNS, env.LINES); + } + + if (process.platform === 'win32') { + try { + // Binary: https://github.com/sindresorhus/win-term-size + const size = execa.sync(path.join(__dirname, 'vendor/windows/term-size.exe')).stdout.split(/\r?\n/); + + if (size.length === 2) { + return create(size[0], size[1]); + } + } catch (err) {} + } else { + if (process.platform === 'darwin') { + try { + // Binary: https://github.com/sindresorhus/macos-term-size + const size = execa.shellSync(path.join(__dirname, 'vendor/macos/term-size')).stdout.split(/\r?\n/); + + if (size.length === 2) { + 
return create(size[0], size[1]); + } + } catch (err) {} + } + + // `resize` is preferred as it works even when all file descriptors are redirected + // https://linux.die.net/man/1/resize + try { + const size = execa.sync('resize', ['-u']).stdout.match(/\d+/g); + + if (size.length === 2) { + return create(size[0], size[1]); + } + } catch (err) {} + + try { + const columns = execa.sync('tput', ['cols']).stdout; + const rows = execa.sync('tput', ['lines']).stdout; + + if (columns && rows) { + return create(columns, rows); + } + } catch (err) {} + } + + return create(80, 24); +}; diff --git a/node_modules/term-size/license b/node_modules/term-size/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/term-size/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/term-size/package.json b/node_modules/term-size/package.json new file mode 100644 index 00000000..798d7ebc --- /dev/null +++ b/node_modules/term-size/package.json @@ -0,0 +1,43 @@ +{ + "name": "term-size", + "version": "1.2.0", + "description": "Reliably get the terminal window size (columns & rows)", + "license": "MIT", + "repository": "sindresorhus/term-size", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js", + "vendor" + ], + "keywords": [ + "term", + "terminal", + "size", + "console", + "window", + "width", + "height", + "columns", + "rows", + "lines", + "tty", + "redirected" + ], + "dependencies": { + "execa": "^0.7.0" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/term-size/readme.md b/node_modules/term-size/readme.md new file mode 100644 index 00000000..dd642cad --- /dev/null +++ b/node_modules/term-size/readme.md @@ -0,0 +1,41 @@ +# term-size [![Build Status: Linux & macOS](https://travis-ci.org/sindresorhus/term-size.svg?branch=master)](https://travis-ci.org/sindresorhus/term-size) [![Build Status: Windows](https://ci.appveyor.com/api/projects/status/c3tydg6uedsk0bob/branch/master?svg=true)](https://ci.appveyor.com/project/sindresorhus/term-size/branch/master) + +> Reliably get the terminal window size + +Because [`process.stdout.columns`](https://nodejs.org/api/tty.html#tty_writestream_columns) doesn't exist when run [non-interactively](http://www.tldp.org/LDP/abs/html/intandnonint.html), for example, in a child process or when piped. This module even works when all the TTY file descriptors are redirected! + +Confirmed working on macOS, Linux, and Windows. + + +## Install + +``` +$ npm install --save term-size +``` + + +## Usage + +```js +const termSize = require('term-size'); + +termSize(); +//=> {columns: 143, rows: 24} +``` + + +## API + +### termSize() + +Returns an `Object` with `columns` and `rows` properties. 
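+
+A minimal sketch (not from the package docs) showing one way the returned object might be used, for example to size output to the current terminal:
+
+```js
+const termSize = require('term-size');
+
+// Illustration only: draw a horizontal rule spanning the full terminal width.
+const size = termSize();
+console.log('-'.repeat(size.columns));
+```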
+ + +## Related + +- [term-size-cli](https://github.com/sindresorhus/term-size-cli) - CLI for this module + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/term-size/vendor/macos/term-size b/node_modules/term-size/vendor/macos/term-size new file mode 100755 index 0000000000000000000000000000000000000000..e383cc737f8e232aecd06a8492657b28d5fb9f1c GIT binary patch literal 8760 zcmeHNU1%It6u#SRHO6341ZnVxNpUreke{0B+M;AjmP|;rF+Vj}^){QGGz**EWp`E+ zA2bm9u!O)q`qoE%(+8z5vP2@-H>D3%RImuj1{D-RLBaL=?%dhV&Q|G@FWy7$ob%m# z?zwlqvja2td(I9~5PxETz6rN@b*+Eywmwt`T~ zl3PxdnqA>|Kk9f7YY!j$6FPOgJ9LK^Pp&!Hf>T6aI9@LZ=Kf~1KQVc4O56TcN^UWe zyQm$)@m|vL=JYsmP+DSRK+1CNwM;H8Gr85g!masuPsd} z@6V4%9FEuN2Uqd3%l{V7((y+09O7U+-fveR-p%J~X6m`=nnc7A+|BWxFfb8M3!9 zAGg{jcx;4)){Qn?E)|EfnbdH4Gn)pc+>ge6ygL|ud$RAzQy*Ts_wqx(#{2l3sd(LB z3vJNH(3An(FX!>O4hq+aqj8LTjy!%nN~W4?WBvKflIyGwof*PDGATGg9zo-JS1pgf z;~JA6BZdLPfMLKeU>GnA7zX|i3@qA}?-DO0&dtx-yHB5v2vLh}2{F1?{~UK;+28)N z;Z7z;f3tTNZ`9V@&hgQ`$~W~7FqWu%0_!rZ?MB0$ws#J{gX#88yywc|w>?*$y*5EGT3?d^{{&nI3<$d~u4jo1)+8?g~}J)y2+>iUGb@;hc`_rVBM zV!jdk1re%ixQK)A8;!;qzpM_vru6`=YqTDtb(I!hJ`ZZNCTP76YdGz^I_$ce;%vdm z;SiQ*3W2dYdxp+Z3Q(>h5^HXVZbn87%&VN z1`Gp+0mFb{z%XDKFbw>s3>-OTo#TQ>#wAPFX{T5$i06x>G-8~VnBFRIJ`xN%hM*OoA=;rxH z4C6sO62hlL_)G{-hcN5#!FWp{oC;y?m;c|u+BIvttl_fW$}<}jF#VeG_&@nssBE`$ hBo#-nI6g4iDD~~02@qkn#&%AI*_*;)(f-*0&EJjm8-Ovk_|qj$0nrx_n^ty+eL=UUa_-zByDqxj$ylpl z4*Sue7uROB9bwjjVuSHQ_8DLtfLVZ2fDKRuASAVon%xF8$$nlQ#}l10lY#rin5fYb z1M#5fmDcHpNRt0BW9<}(Ke7lT&~+M~vfCJY{WSP2s#5Z7k-hriqF}@Xpo%_KwcrbyzbfNX(I6cSp3OL2p|96=ctMvEw4^` zhw3PpZ8xS04|Y440r*tm{#wtOuA-!W zi%e5W)1Vl&yYZ;NBk}NU4kp{u|AMh-omd5FQeSm4I$oYN3)}{h)DB6RC%q#|X)P=p zdYbH`y(9kj)QSvRcim9t9s01?ApH$hMr-UAD2+Zf3}%aU`%UO5_fMl!os7L`{c>{h zdSK$4$-0TpI9U2gtQ$>nbgKl0MZ_SdHHHJk@EKsdBhjOa5=r=wYO>qdMqZmrv|1>m zTCkU+8l|yB2^^A}?B?`c=QB2H11dVcXe_=HQnT@1U7dXD9CYQMuU!*g{YZ!WO7!Tq z_rLvkRr*WV@fME9o9q`wk4&Z5L6nq3&?=KwnYGHMRV7+grd8!sMc0TmZz}+sxUy1r>3k=%v*#RiX|g&CaTb=J{+Ak zScl6%$@{USb-0Jv3WbeY5^yvpnhSjFuyyz{B96Yha+pLjuTZ|VS0BifYGIXs5Pjd^ zjehu7)}|jslSb<<8#>}Z+8cXOGDm-AkY+zc`ILD(`YR%vKVaolOX6!)>TZ5-v<#A! 
zc|2%`rBdsz)h{1+P8fE>KxTFB9mO8X5;VoKb3{(Xg&1u-vh!XU;k%{;eyg%`hcb_H z!Frz$x~f(A6C|3xz7*lR-slo$9sUm3o+eYGdtu17_D@*kKjO9L%C+|NvGcG}+IGmo z6N>q}(R~Jao81KQa;sfPPhjkt$Vg=QJOY!o!>VQ#w-dgju?9W~<-J767t4w95on$g&ck)rg|(3YUFxaZjVo4alr zd(H@g$+cJte72wA4j?aL>Hkz!%+-7hr|%^$${9+A*!wgBwWdLBHc_Q-1V!v{s*s3C zqBZstP~FWlZt?{*t7&0&VAU&E*1{Z0M+b8B zr?87wz_Z|z$FkVcbs$kn9h%*vBD^DO&jW{CZkJcTqrMnDYLZjR>URjgP{VWNsDQkR zsnR(#Aa3NE_P0aRD~TE!NlHtgrG7>?c0k6Y^ND;V$-yqGD>lh{k?eCoPMqqCXo%+{ zQ1>GQ*?E+Y>-m@}B*j!{>e=L0+7`79iX-5E^k`XvTbJZQxm6Phat>CRxnjHK@qsby zkF7AtMzTl-ko+qEv?`2yHu8C%{r=bp$xjs$f~GaIgC-y`48PPPd@ks*T3aoN zMKCNc5wI~bR=468j%Ot62NR=J7{#lQZz+hjjYp`NPraaQu-mTKU@uWitALQ7S4(TC zPN}7>RPW=nLlNDe(Sx=OP+py>{`cxnhCbm+%H*jJCgdy0iW8KL?0)>h=bFC15{6`s zkUu=XD^i7yZU>tqu%zM?;;V&JVH5g1d3mxjxm0jL)HHk_ZN-q{G>slbhx;izrxN6m z>^zoxc_Gk#2O?A2uUV;xAkJ4G(Np>*&RYdA#fX>mn4lpezs{$o{H`Fc$V^&0=<{TL1WP{iP47r` zQd_4p{?#U4MgZSvvNr?TV@W*MNef?n0iHSDz+3{zGaLF%Xjh<3TZB!y1P7 zFFIbU5viJJI&gD1ASCZ_UG{1C;CVB@P(Lt-%~&9JZhQ`E*|v%E~eiYy^_%7Nx~61CD)CP188 zx`n5?8EMo9XDZ_DA4lK%NV+PTc0`XnCsmSz1@nl5L^yO2z?Q&m?d>9Q2{yha3du1; zbi4|C$5{38$?LF@sijLmf#8)l@c9$c58@n<$XP*Yx!i>DG=fV8rZ>|$VS`-&Za;8o zFZ$ZDNxTmE&@@{=rf1*(t*Js9F~Gib8R8Qc960;Y2XUOv)4xD0Qrt8kv{U3#TTGAW^la;WUniF7V(WdYAuB;>?D?@cu!&y1Gl-5-=ST0EenP3%;l&_+hKQ9$ z6F*gb1VnVqJoFJE!!62r%JqVB7oCXXPta3#j$3yUt<5@IM6}bQbvVWwChKq;8Wafl zzomiv$MH$uHuQrYSEftPC}BZ4FFIx_c#tdD&K0aanEoQz6z4&WOy=m&O9`5x>cg?* zotDfKBgn05<`L$Oe~#C#=W`OTp;HqxOer>;Q{IfX9n2*UPPnujPe>c<@UK7}ckYM5 zW^Gc1-DsN>=OOX|25<&FOEPgSPFEoNW7i-#5Eweyj$w)?FGJtDYlz$5h72G2F~hBw zH~JJ|n6){j+%dkG+6yNPDS1l1<6!#RI0{;KIS(prNw3T~b78mM6(ga=By{I2u*!Re z4nUqdcr9e@IZ4na613BK@Sa(jXBDF6(s= z)PlVjRoa8k5&Yv@nSs{7v{)a&uG^hEFV^hyo4Ue_d^K{s1v&pIF&f}3T)>5k!K|5Ph#_dk&$s4fSuWs zSNfY6NC!St)y&Sid1M^F0LWt}U(-VxyHrMp_ZBFGdKyqCCbyyI9mx~R8QQ@x>B5kH zO_DdxgGTpWll7+^=l0rY{9=sjGo+o5%As}|@ls1g>|DycDYWs4LRsvmZXIoBIJs3I z3`zkd_fIe=?^9pFQO_JZE}4`@L9W5;P+4z^9gZB%N$Uh7NO4}!j#isl;uxQnNOnJ? z_Ye7iw;F|xBA9**If4CtOlrWkE9%1J0(9(hBhKVxV?sAk=C)>O8ck{`>BNDYwBkI; zXEAdumoNN%M>gPCC&GLysH&4V7-!>oLLf?YE!Y<>P>f3tMT#;j6w}gIr3!hk(qIDr zd#Cz(f;^?!Bwxo*Mc>E#olKnhH+e!?Ws~K2+hN}D>7G)n>9u$ zo;&f3jpXnT0M9#=jc@YsAf3-q)^X+aP#!xhABw!2DkPt%qzWkxyp?sjT$``VKjz}4 z%a<913-JC|?;j@qFmbWtW1;^^WX@eXY% zD>mRm20meoQGOv8yhBUcZq&y!KRWR~a7_o!VP-m$7p$ta?h4Ja?rJEB9gjB|%cP5{ z4~!`7=B2L<%r0m*Eqy6uj_xr;lNC$fi=;8fH}`6^HGj2+ocyKj0K>_;?znV`$Q_z2;wy9P^mb<3>y24h+5*AYu5h4bD%Lv*ZS z>2IVCC3aIbaLG-C_$)5Jfx z>0!|9*@=A~_PmW~*)RiS58u6M8&1E9LE;)CzP!s*sQ{muG=4g7fd5_Y4?LL#0owU> zP4)4d@(zUAEX^fbbL&Inze?l(xfYLm^5PNiG?ZC)wakhgkW$b+a=hVlqBI+u$NA9= z<^l)1ilzG_lhwz$RPTuVQ{Kqs8~o4dmjv=a(>RVaiY0m5iuWw=+WFuF|Bz1zp2qM1 zXOI>kMCMaMv{M^AS8>=snAnq^k8tJWWqzxlJotUX+BMZG*5u5LD3ZNQnnUrOrX!!G z>=I4hB#(#k)j@<4F2AN9=_owuMvY|O&=_^)*CNH4r;(4*-ykF<%5^vsL}n`ubSy28 z9ltZJ+-`=B^*0ZtIQev!ZXb}iz~id<4t_&>9erIBZ?&64r9+d{j?O_cpymSunT30R zSoZ}S`qc$Tu}G;->-9T&eMGNc*Xu)i{jy$9==DLp-mll=dY#nk@9FiEdi|JQKcd$U z>GfCjdPJ}9)$4oodXU%H9Yzu*ZlopxmG}AO?m~{_{^NXc)8sFY=H6#msA?7N4X99? 
zH_kr(Y$8ykTiT8}r1{Gg;Ee$4+b|{q=w{^!T%Y{eICs|2-x~qHZ&OcqcUQQ}J#hKr z>PolY&$_xoo*rLV@`U&|#hxx-z#9bC?KvHu3%F%*^;H`0b(_0Z>D-NeS2*kmGq%88 zg#XxAar|*?bg&~AI@s#Kt%2Y`K=gz{!4RzM>T^lG=u|AcT3isOUkfYO(z|K2Jar-N8N=D7g%=qG@GYeani4Gs9tDpeN#Yg?!Rhv7)18 zImXz^&7P3g9~=-vu0W3`%er!nbJeoem1`HW&R+an!4r=7B@sVmkb*%m+~@N9(c(X3 z@bqI4sy%M8V)crqEEm4V4@xe-=nsa&V$dsweLVr6*N0y~bbG{#&eoM39njtG3Q4{* zQnl+;j6Laau$KT+02{6+ngM=58~hnqJaAPF{h&hf^?5|E%jb`TJXaU7dbb;vg~bK^ zVy`PK27;om&(-5u9QJfezF>ff;u>E_in#nYL_DFboDn{FgCTK&Timo&@`Oc~B)W1^ zC~<^Oc^e>Q$+ zGJ2(?!W)n@&A+Tr+4Z9*TTP;g>i=4S24pjli?z>Al!pnCw!h|^nK2e z=Tz0G8!w{9>yW(;+-K=#s?j1EV?`FBx!8Ccs(RdrlDtm9p`@xdq6SUXofd&DLemeh z?NQYm3(%;ym}173xrNK_F4!(a3k$w(EE=odSD&hX9v0LWlbnYDj&W6`pGHECZ58AY z{z*XdY4AjLvLB?E{8$3|V?@C9HstggU&vE?T`{L^1)1r2RsDQE{jFq|<{zO?)*}K( zdKma_rMl75-e;-B^9D;Zp540bG;}g=1#rY$177+*j%sB6|o1!K3G` zxSFT-MnsG8{H;a(IL7}DbuM0R#A_Mi)mZG&TTS%E0L8;kbSYJ(bfZ?=eQEags%*f_JEYeDi5zz=%*S|b>-?uYt*2D#Zh zasziR=|K?zbn(;=Tpen%eLLkuv|#HPrhOk)sT=5Cz`dX$KTjK9r=_Y3y9OLPVPApi zSnegxlhBRD2tCm@=IueWUrzwWp{HHf6E!+5HYB1Rv0E%Ah{DD}zI+TwgZ46_MWNib zA(w<7>GQ^YX(eA;igagMijA$=c4IMQX)f|Ow1T%8yqA(N@J5a8AjQ_WmaF7{kI)aA zPSA7`O^3z28GG4sY4XM}UJ#xpJm8Nc;rDpvMm_^m)Zv=4S;$oGpK91Zw+d zI3J1Ny>c7oB@bucjCUe%rz^*Rx8d@x0^k6w0QdoefCmAO0lo*A0K5e_2C(2h@Uwuc z0jmI;0NVgx1N;+UCtw_K1n^q`6{7?_01RT?zzdK7e+Re+umkWA;1R&T0(JxT z0$u>T40s*z3&3vx(*PstfL%%6uWU>$(}zI*WzXe^01w~FxCwuD=bC{m5x=L|6>$4K z^^7?QN5b_BQN}!k>5y>u8tErH*0R6ln60?s?(4)YYJGT_&+nlY`wES8`nqpz4C00r zKdv*j1P}+8AGeVwyM9SQtH-tZ6f|2{&>D2P^@zZt2MYL=Ydh|pgP}gU5$3`X>mH%Q zBdrefa@*a`E#01ex-*3}h%}Rg4!_6K&+ZaBJ)u5d027uC8k2=ie;B5&arq-2_MmV^ zA#A6h-}{1>?r<4s%)dOQ+c$Gbv3ShrgULn1r@>>&5EE>~Z;r;Dq=AIEfcYpwGP zU0(W&K*nx1aD{NZk8MIbxC!SBLl^FmZ)IU)mu_8iL02Teal-{&1KoXYe}K&wT$_R+ ziCrcjT`|VkRe~mcnc(dY`2v!cwFzEiDRvFPJAlMS>u!Or^@;jRLZ7FvyMHTVUlDY2 z_HBV~@Lf1uJu8Hzklz!)FN&xY=-=1&EkI;u6M3l;cUI)a{dhq)RjQr z|0eW>H+P4mtajQR&z8TW2vnWp$hl*);XAsrnLd42Hv%|Y{(>Vg;$Y=~iEalI2hYy+ zU+_%-9@h-~^D6FU22Uh3FnnaB&upjQ@z*T;K5E^=I8)O-i(aOUHE6A29jpsaCtHP9 z3tP!npx=V$GPJ2YE4=#&Zc4a(>XV+|SJpv~0rDgk!uce?dLYe*bBPC%1I){C)MPcB zrV96GRctAs1~!mItJsC$Z3ImpUfV9vZ-q53^l&IZ>4j7YBVl$m`#k=Ixdt~hdC(4` z?qw0QDp@y|K|Gt-Jjl%2(8MXbxo!QBAoE_R2H#Wc{}*d<;SfFdJM=`7bIn^ hqbO<_h5KZNWJd%-pa>v#e$@H%J4bk&2>z_fe*q!}CAt6r literal 0 HcmV?d00001 diff --git a/node_modules/thenify-all/History.md b/node_modules/thenify-all/History.md new file mode 100644 index 00000000..16e378c0 --- /dev/null +++ b/node_modules/thenify-all/History.md @@ -0,0 +1,11 @@ + +1.6.0 / 2015-01-11 +================== + + * feat: exports thenify + * support node 0.8+ + +1.5.0 / 2015-01-09 +================== + + * feat: support backward compatible with callback diff --git a/node_modules/thenify-all/LICENSE b/node_modules/thenify-all/LICENSE new file mode 100644 index 00000000..a7ae8ee9 --- /dev/null +++ b/node_modules/thenify-all/LICENSE @@ -0,0 +1,22 @@ + +The MIT License (MIT) + +Copyright (c) 2014 Jonathan Ong me@jongleberry.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/thenify-all/README.md b/node_modules/thenify-all/README.md new file mode 100644 index 00000000..8e7829e9 --- /dev/null +++ b/node_modules/thenify-all/README.md @@ -0,0 +1,66 @@ + +# thenify-all + +[![NPM version][npm-image]][npm-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] +[![Dependency Status][david-image]][david-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] +[![Gittip][gittip-image]][gittip-url] + +Promisifies all the selected functions in an object. + +```js +var thenifyAll = require('thenify-all'); + +var fs = thenifyAll(require('fs'), {}, [ + 'readFile', + 'writeFile', +]); + +fs.readFile(__filename).then(function (buffer) { + console.log(buffer.toString()); +}); +``` + +## API + +### var obj = thenifyAll(source, [obj], [methods]) + +Promisifies all the selected functions in an object. + +- `source` - the source object for the async functions +- `obj` - the destination to set all the promisified methods +- `methods` - an array of method names of `source` + +### var obj = thenifyAll.withCallback(source, [obj], [methods]) + +Promisifies all the selected functions in an object and backward compatible with callback. + +- `source` - the source object for the async functions +- `obj` - the destination to set all the promisified methods +- `methods` - an array of method names of `source` + +### thenifyAll.thenify + +Exports [thenify](https://github.com/thenables/thenify) this package uses. 
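+
+The `withCallback` variant has no example above; here is a minimal sketch (reusing the `fs` example from the top of this README, not taken from the package docs) of the hybrid promise/callback style:
+
+```js
+var thenifyAll = require('thenify-all');
+
+// Illustrative sketch: same fs example as the intro, but with the
+// callback-compatible wrapper.
+var fs = thenifyAll.withCallback(require('fs'), {}, ['readFile']);
+
+// Promise style
+fs.readFile(__filename).then(function (buffer) {
+  console.log(buffer.toString());
+});
+
+// Callback style keeps working for existing callers
+fs.readFile(__filename, function (err, buffer) {
+  if (err) throw err;
+  console.log(buffer.toString());
+});
+```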
+ +[gitter-image]: https://badges.gitter.im/thenables/thenify-all.png +[gitter-url]: https://gitter.im/thenables/thenify-all +[npm-image]: https://img.shields.io/npm/v/thenify-all.svg?style=flat-square +[npm-url]: https://npmjs.org/package/thenify-all +[github-tag]: http://img.shields.io/github/tag/thenables/thenify-all.svg?style=flat-square +[github-url]: https://github.com/thenables/thenify-all/tags +[travis-image]: https://img.shields.io/travis/thenables/thenify-all.svg?style=flat-square +[travis-url]: https://travis-ci.org/thenables/thenify-all +[coveralls-image]: https://img.shields.io/coveralls/thenables/thenify-all.svg?style=flat-square +[coveralls-url]: https://coveralls.io/r/thenables/thenify-all +[david-image]: http://img.shields.io/david/thenables/thenify-all.svg?style=flat-square +[david-url]: https://david-dm.org/thenables/thenify-all +[license-image]: http://img.shields.io/npm/l/thenify-all.svg?style=flat-square +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/thenify-all.svg?style=flat-square +[downloads-url]: https://npmjs.org/package/thenify-all +[gittip-image]: https://img.shields.io/gratipay/jonathanong.svg?style=flat-square +[gittip-url]: https://gratipay.com/jonathanong/ diff --git a/node_modules/thenify-all/index.js b/node_modules/thenify-all/index.js new file mode 100644 index 00000000..e0cc69ce --- /dev/null +++ b/node_modules/thenify-all/index.js @@ -0,0 +1,73 @@ + +var thenify = require('thenify') + +module.exports = thenifyAll +thenifyAll.withCallback = withCallback +thenifyAll.thenify = thenify + +/** + * Promisifies all the selected functions in an object. + * + * @param {Object} source the source object for the async functions + * @param {Object} [destination] the destination to set all the promisified methods + * @param {Array} [methods] an array of method names of `source` + * @return {Object} + * @api public + */ + +function thenifyAll(source, destination, methods) { + return promisifyAll(source, destination, methods, thenify) +} + +/** + * Promisifies all the selected functions in an object and backward compatible with callback. 
+ * + * @param {Object} source the source object for the async functions + * @param {Object} [destination] the destination to set all the promisified methods + * @param {Array} [methods] an array of method names of `source` + * @return {Object} + * @api public + */ + +function withCallback(source, destination, methods) { + return promisifyAll(source, destination, methods, thenify.withCallback) +} + +function promisifyAll(source, destination, methods, promisify) { + if (!destination) { + destination = {}; + methods = Object.keys(source) + } + + if (Array.isArray(destination)) { + methods = destination + destination = {} + } + + if (!methods) { + methods = Object.keys(source) + } + + if (typeof source === 'function') destination = promisify(source) + + methods.forEach(function (name) { + // promisify only if it's a function + if (typeof source[name] === 'function') destination[name] = promisify(source[name]) + }) + + // proxy the rest + Object.keys(source).forEach(function (name) { + if (deprecated(source, name)) return + if (destination[name]) return + destination[name] = source[name] + }) + + return destination +} + +function deprecated(source, name) { + var desc = Object.getOwnPropertyDescriptor(source, name) + if (!desc || !desc.get) return false + if (desc.get.name === 'deprecated') return true + return false +} diff --git a/node_modules/thenify-all/package.json b/node_modules/thenify-all/package.json new file mode 100644 index 00000000..768800fa --- /dev/null +++ b/node_modules/thenify-all/package.json @@ -0,0 +1,34 @@ +{ + "name": "thenify-all", + "description": "Promisifies all the selected functions in an object", + "version": "1.6.0", + "author": "Jonathan Ong (http://jongleberry.com)", + "license": "MIT", + "repository": "thenables/thenify-all", + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "devDependencies": { + "bluebird": "2", + "istanbul": "0", + "mocha": "2" + }, + "scripts": { + "test": "mocha --reporter spec", + "test-cov": "istanbul cover node_modules/.bin/_mocha -- --reporter dot", + "test-travis": "istanbul cover node_modules/.bin/_mocha --report lcovonly -- --reporter dot" + }, + "keywords": [ + "promisify", + "promise", + "thenify", + "then", + "es6" + ], + "files": [ + "index.js" + ], + "engines": { + "node": ">=0.8" + } +} diff --git a/node_modules/thenify/History.md b/node_modules/thenify/History.md new file mode 100644 index 00000000..734e5d8c --- /dev/null +++ b/node_modules/thenify/History.md @@ -0,0 +1,5 @@ + +3.3.0 / 2017-05-19 +================== + + * feat: support options.multiArgs and options.withCallback (#27) diff --git a/node_modules/thenify/LICENSE b/node_modules/thenify/LICENSE new file mode 100644 index 00000000..bed701c6 --- /dev/null +++ b/node_modules/thenify/LICENSE @@ -0,0 +1,22 @@ + +The MIT License (MIT) + +Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/thenify/README.md b/node_modules/thenify/README.md new file mode 100644 index 00000000..3afce3e0 --- /dev/null +++ b/node_modules/thenify/README.md @@ -0,0 +1,120 @@ + +# thenify + +[![NPM version][npm-image]][npm-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] +[![Dependency Status][david-image]][david-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +Promisify a callback-based function using [`any-promise`](https://github.com/kevinbeaty/any-promise). + +- Preserves function names +- Uses a native promise implementation if available and tries to fall back to a promise implementation such as `bluebird` +- Converts multiple arguments from the callback into an `Array`, also support change the behavior by `options.multiArgs` +- Resulting function never deoptimizes +- Supports both callback and promise style + +An added benefit is that `throw`n errors in that async function will be caught by the promise! + +## API + +### fn = thenify(fn, options) + +Promisifies a function. + +### Options + +`options` are optional. + +- `options.withCallback` - support both callback and promise style, default to `false`. +- `options.multiArgs` - change the behavior when callback have multiple arguments. default to `true`. 
+ - `true` - converts multiple arguments to an array + - `false`- always use the first argument + - `Array` - converts multiple arguments to an object with keys provided in `options.multiArgs` + +- Turn async functions into promises + +```js +var thenify = require('thenify'); + +var somethingAsync = thenify(function somethingAsync(a, b, c, callback) { + callback(null, a, b, c); +}); +``` + +- Backward compatible with callback + +```js +var thenify = require('thenify'); + +var somethingAsync = thenify(function somethingAsync(a, b, c, callback) { + callback(null, a, b, c); +}, { withCallback: true }); + +// somethingAsync(a, b, c).then(onFulfilled).catch(onRejected); +// somethingAsync(a, b, c, function () {}); +``` + +or use `thenify.withCallback()` + +```js +var thenify = require('thenify').withCallback; + +var somethingAsync = thenify(function somethingAsync(a, b, c, callback) { + callback(null, a, b, c); +}); + +// somethingAsync(a, b, c).then(onFulfilled).catch(onRejected); +// somethingAsync(a, b, c, function () {}); +``` + +- Always return the first argument in callback + +```js +var thenify = require('thenify'); + +var promise = thenify(function (callback) { + callback(null, 1, 2, 3); +}, { multiArgs: false }); + +// promise().then(function onFulfilled(value) { +// assert.equal(value, 1); +// }); +``` + +- Converts callback arguments to an object + +```js +var thenify = require('thenify'); + +var promise = thenify(function (callback) { + callback(null, 1, 2, 3); +}, { multiArgs: [ 'one', 'tow', 'three' ] }); + +// promise().then(function onFulfilled(value) { +// assert.deepEqual(value, { +// one: 1, +// tow: 2, +// three: 3 +// }); +// }); +``` + +[gitter-image]: https://badges.gitter.im/thenables/thenify.png +[gitter-url]: https://gitter.im/thenables/thenify +[npm-image]: https://img.shields.io/npm/v/thenify.svg?style=flat-square +[npm-url]: https://npmjs.org/package/thenify +[github-tag]: http://img.shields.io/github/tag/thenables/thenify.svg?style=flat-square +[github-url]: https://github.com/thenables/thenify/tags +[travis-image]: https://img.shields.io/travis/thenables/thenify.svg?style=flat-square +[travis-url]: https://travis-ci.org/thenables/thenify +[coveralls-image]: https://img.shields.io/coveralls/thenables/thenify.svg?style=flat-square +[coveralls-url]: https://coveralls.io/r/thenables/thenify +[david-image]: http://img.shields.io/david/thenables/thenify.svg?style=flat-square +[david-url]: https://david-dm.org/thenables/thenify +[license-image]: http://img.shields.io/npm/l/thenify.svg?style=flat-square +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/thenify.svg?style=flat-square +[downloads-url]: https://npmjs.org/package/thenify diff --git a/node_modules/thenify/index.js b/node_modules/thenify/index.js new file mode 100644 index 00000000..161d32e6 --- /dev/null +++ b/node_modules/thenify/index.js @@ -0,0 +1,80 @@ + +var Promise = require('any-promise') +var assert = require('assert') + +module.exports = thenify + +/** + * Turn async functions into promises + * + * @param {Function} $$__fn__$$ + * @return {Function} + * @api public + */ + +function thenify($$__fn__$$, options) { + assert(typeof $$__fn__$$ === 'function') + return eval(createWrapper($$__fn__$$.name, options)) +} + +/** + * Turn async functions into promises and backward compatible with callback + * + * @param {Function} $$__fn__$$ + * @return {Function} + * @api public + */ + +thenify.withCallback = function ($$__fn__$$, options) { + assert(typeof $$__fn__$$ === 'function') + 
options = options || {} + options.withCallback = true + if (options.multiArgs === undefined) options.multiArgs = true + return eval(createWrapper($$__fn__$$.name, options)) +} + +function createCallback(resolve, reject, multiArgs) { + return function(err, value) { + if (err) return reject(err) + var length = arguments.length + + if (length <= 2 || !multiArgs) return resolve(value) + + if (Array.isArray(multiArgs)) { + var values = {} + for (var i = 1; i < length; i++) values[multiArgs[i - 1]] = arguments[i] + return resolve(values) + } + + var values = new Array(length - 1) + for (var i = 1; i < length; ++i) values[i - 1] = arguments[i] + resolve(values) + } +} + +function createWrapper(name, options) { + name = (name || '').replace(/\s|bound(?!$)/g, '') + options = options || {} + // default to true + var multiArgs = options.multiArgs !== undefined ? options.multiArgs : true + multiArgs = 'var multiArgs = ' + JSON.stringify(multiArgs) + '\n' + + var withCallback = options.withCallback ? + 'var lastType = typeof arguments[len - 1]\n' + + 'if (lastType === "function") return $$__fn__$$.apply(self, arguments)\n' + : '' + + return '(function ' + name + '() {\n' + + 'var self = this\n' + + 'var len = arguments.length\n' + + multiArgs + + withCallback + + 'var args = new Array(len + 1)\n' + + 'for (var i = 0; i < len; ++i) args[i] = arguments[i]\n' + + 'var lastIndex = i\n' + + 'return new Promise(function (resolve, reject) {\n' + + 'args[lastIndex] = createCallback(resolve, reject, multiArgs)\n' + + '$$__fn__$$.apply(self, args)\n' + + '})\n' + + '})' +} diff --git a/node_modules/thenify/package.json b/node_modules/thenify/package.json new file mode 100644 index 00000000..994ffdb2 --- /dev/null +++ b/node_modules/thenify/package.json @@ -0,0 +1,31 @@ +{ + "name": "thenify", + "description": "Promisify a callback-based function", + "version": "3.3.0", + "author": "Jonathan Ong (http://jongleberry.com)", + "license": "MIT", + "repository": "thenables/thenify", + "dependencies": { + "any-promise": "^1.0.0" + }, + "devDependencies": { + "bluebird": "^3.1.1", + "istanbul": "^0.4.0", + "mocha": "^3.0.2" + }, + "scripts": { + "test": "mocha --reporter spec", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter dot" + }, + "keywords": [ + "promisify", + "promise", + "thenify", + "then", + "es6" + ], + "files": [ + "index.js" + ] +} diff --git a/node_modules/timed-out/index.js b/node_modules/timed-out/index.js new file mode 100644 index 00000000..94007a4a --- /dev/null +++ b/node_modules/timed-out/index.js @@ -0,0 +1,55 @@ +'use strict'; + +module.exports = function (req, time) { + if (req.timeoutTimer) { + return req; + } + + var delays = isNaN(time) ? time : {socket: time, connect: time}; + var host = req._headers ? (' to ' + req._headers.host) : ''; + + if (delays.connect !== undefined) { + req.timeoutTimer = setTimeout(function timeoutHandler() { + req.abort(); + var e = new Error('Connection timed out on request' + host); + e.code = 'ETIMEDOUT'; + req.emit('error', e); + }, delays.connect); + } + + // Clear the connection timeout timer once a socket is assigned to the + // request and is connected. + req.on('socket', function assign(socket) { + // Socket may come from Agent pool and may be already connected. 
+ if (!(socket.connecting || socket._connecting)) { + connect(); + return; + } + + socket.once('connect', connect); + }); + + function clear() { + if (req.timeoutTimer) { + clearTimeout(req.timeoutTimer); + req.timeoutTimer = null; + } + } + + function connect() { + clear(); + + if (delays.socket !== undefined) { + // Abort the request if there is no activity on the socket for more + // than `delays.socket` milliseconds. + req.setTimeout(delays.socket, function socketTimeoutHandler() { + req.abort(); + var e = new Error('Socket timed out on request' + host); + e.code = 'ESOCKETTIMEDOUT'; + req.emit('error', e); + }); + } + } + + return req.on('error', clear); +}; diff --git a/node_modules/timed-out/license b/node_modules/timed-out/license new file mode 100644 index 00000000..faadd528 --- /dev/null +++ b/node_modules/timed-out/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Vsevolod Strukchinsky + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/timed-out/package.json b/node_modules/timed-out/package.json new file mode 100644 index 00000000..b86baa70 --- /dev/null +++ b/node_modules/timed-out/package.json @@ -0,0 +1,36 @@ +{ + "name": "timed-out", + "version": "4.0.1", + "description": "Emit `ETIMEDOUT` or `ESOCKETTIMEDOUT` when ClientRequest is hanged", + "license": "MIT", + "repository": "floatdrop/timed-out", + "author": { + "name": "Vsevolod Strukchinsky", + "email": "floatdrop@gmail.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && mocha" + }, + "files": [ + "index.js" + ], + "keywords": [ + "http", + "https", + "get", + "got", + "url", + "uri", + "request", + "util", + "utility", + "simple" + ], + "devDependencies": { + "mocha": "*", + "xo": "^0.16.0" + } +} diff --git a/node_modules/timed-out/readme.md b/node_modules/timed-out/readme.md new file mode 100644 index 00000000..fa0a0356 --- /dev/null +++ b/node_modules/timed-out/readme.md @@ -0,0 +1,42 @@ +# timed-out [![Build Status](https://travis-ci.org/floatdrop/timed-out.svg?branch=master)](https://travis-ci.org/floatdrop/timed-out) + +> Timeout HTTP/HTTPS requests + +Emit Error object with `code` property equal `ETIMEDOUT` or `ESOCKETTIMEDOUT` when ClientRequest is hanged. 
+ +## Usage + +```js +var get = require('http').get; +var timeout = require('timed-out'); + +var req = get('http://www.google.ru'); +timeout(req, 2000); // Set 2 seconds limit +``` + +### API + +#### timedout(request, time) + +##### request + +*Required* +Type: [`ClientRequest`](http://nodejs.org/api/http.html#http_class_http_clientrequest) + +The request to watch on. + +##### time + +*Required* +Type: `number` or `object` + +Time in milliseconds to wait for `connect` event on socket and also time to wait on inactive socket. + +Or you can pass Object with following fields: + +- `connect` - time to wait for connection +- `socket` - time to wait for activity on socket + +## License + +MIT © [Vsevolod Strukchinsky](floatdrop@gmail.com) diff --git a/node_modules/to-object-path/LICENSE b/node_modules/to-object-path/LICENSE new file mode 100644 index 00000000..1e49edf8 --- /dev/null +++ b/node_modules/to-object-path/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/to-object-path/README.md b/node_modules/to-object-path/README.md new file mode 100644 index 00000000..7f3cfb16 --- /dev/null +++ b/node_modules/to-object-path/README.md @@ -0,0 +1,71 @@ +# to-object-path [![NPM version](https://badge.fury.io/js/to-object-path.svg)](http://badge.fury.io/js/to-object-path) + +> Create an object path from a list or array of strings. + +## Install + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i to-object-path --save +``` + +## Usage + +```js +var toPath = require('to-object-path'); + +toPath('foo', 'bar', 'baz'); +toPath('foo', ['bar', 'baz']); +//=> 'foo.bar.baz' +``` + +Also supports passing an arguments object (without having to slice args): + +```js +function foo() + return toPath(arguments); +} + +foo('foo', 'bar', 'baz'); +foo('foo', ['bar', 'baz']); +//=> 'foo.bar.baz' +``` + +Visit the [example](./example.js) to see how this could be used in an application. + +## Related projects + +* [get-value](https://www.npmjs.com/package/get-value): Use property paths (` a.b.c`) to get a nested value from an object. | [homepage](https://github.com/jonschlinkert/get-value) +* [has-value](https://www.npmjs.com/package/has-value): Returns true if a value exists, false if empty. 
Works with deeply nested values using… [more](https://www.npmjs.com/package/has-value) | [homepage](https://github.com/jonschlinkert/has-value) +* [omit-value](https://www.npmjs.com/package/omit-value): Omit properties from an object or deeply nested property of an object using object path… [more](https://www.npmjs.com/package/omit-value) | [homepage](https://github.com/jonschlinkert/omit-value) +* [set-value](https://www.npmjs.com/package/set-value): Create nested values and any intermediaries using dot notation (`'a.b.c'`) paths. | [homepage](https://github.com/jonschlinkert/set-value) +* [unset-value](https://www.npmjs.com/package/unset-value): Delete nested properties from an object using dot notation. | [homepage](https://github.com/jonschlinkert/unset-value) + +## Running tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/to-object-path/issues/new). + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on October 28, 2015._ \ No newline at end of file diff --git a/node_modules/to-object-path/index.js b/node_modules/to-object-path/index.js new file mode 100644 index 00000000..489f8f68 --- /dev/null +++ b/node_modules/to-object-path/index.js @@ -0,0 +1,33 @@ +/*! + * to-object-path + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var typeOf = require('kind-of'); + +module.exports = function toPath(args) { + if (typeOf(args) !== 'arguments') { + args = arguments; + } + return filter(args).join('.'); +}; + +function filter(arr) { + var len = arr.length; + var idx = -1; + var res = []; + + while (++idx < len) { + var ele = arr[idx]; + if (typeOf(ele) === 'arguments' || Array.isArray(ele)) { + res.push.apply(res, filter(ele)); + } else if (typeof ele === 'string') { + res.push(ele); + } + } + return res; +} diff --git a/node_modules/to-object-path/package.json b/node_modules/to-object-path/package.json new file mode 100644 index 00000000..2fe341b7 --- /dev/null +++ b/node_modules/to-object-path/package.json @@ -0,0 +1,48 @@ +{ + "name": "to-object-path", + "description": "Create an object path from a list or array of strings.", + "version": "0.3.0", + "homepage": "https://github.com/jonschlinkert/to-object-path", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/to-object-path", + "bugs": { + "url": "https://github.com/jonschlinkert/to-object-path/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "kind-of": "^3.0.2" + }, + "devDependencies": { + "base": "^0.6.7", + "mocha": "*" + }, + "keywords": [ + "dot", + "nested", + "notation", + "object", + "path", + "stringify" + ], + "verb": { + "related": { + "list": [ + "get-value", + "set-value", + "has-value", + "omit-value", + "unset-value" + ] + } + } +} diff --git a/node_modules/to-regex-range/LICENSE b/node_modules/to-regex-range/LICENSE new file mode 100644 index 00000000..83b56e70 --- /dev/null +++ b/node_modules/to-regex-range/LICENSE @@ -0,0 +1,21 @@ +The MIT 
License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/to-regex-range/README.md b/node_modules/to-regex-range/README.md new file mode 100644 index 00000000..2763c5ae --- /dev/null +++ b/node_modules/to-regex-range/README.md @@ -0,0 +1,281 @@ +# to-regex-range [![NPM version](https://img.shields.io/npm/v/to-regex-range.svg?style=flat)](https://www.npmjs.com/package/to-regex-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![NPM total downloads](https://img.shields.io/npm/dt/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![Linux Build Status](https://img.shields.io/travis/micromatch/to-regex-range.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/to-regex-range) + +> Pass two numbers, get a regex-compatible source string for matching ranges. Validated against more than 2.78 million test assertions. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save to-regex-range +``` + +Install with [yarn](https://yarnpkg.com): + +```sh +$ yarn add to-regex-range +``` + +
+## What does this do? + +
+ +This library generates the `source` string to be passed to `new RegExp()` for matching a range of numbers. + +**Example** + +```js +var toRegexRange = require('to-regex-range'); +var regex = new RegExp(toRegexRange('15', '95')); +``` + +A string is returned so that you can do whatever you need with it before passing it to `new RegExp()` (like adding `^` or `$` boundaries, defining flags, or combining it with another string). + +
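As a minimal sketch of that last point (not an example from the original readme), the returned source can be combined with extra pattern text and anchors before compiling; the `item-` prefix here is purely illustrative, and the `1[5-9]|[2-8][0-9]|9[0-5]` source for `'15'` to `'95'` is the one documented in the Usage section below.

```js
var toRegexRange = require('to-regex-range');

// '15'..'95' => '1[5-9]|[2-8][0-9]|9[0-5]' (see the Usage section below)
var range = toRegexRange('15', '95');

// Wrap the alternation in a non-capturing group before embedding it,
// otherwise the `|` alternatives would escape into the surrounding pattern.
var re = new RegExp('^item-(?:' + range + ')$');

console.log(re.test('item-42')); //=> true
console.log(re.test('item-96')); //=> false
```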
+ +
+ +
+## Why use this library? + +
+ +### Convenience + +Creating regular expressions for matching numbers gets deceptively complicated pretty fast. + +For example, let's say you need a validation regex for matching part of a user-id, postal code, social security number, tax id, etc: + +* regex for matching `1` => `/1/` (easy enough) +* regex for matching `1` through `5` => `/[1-5]/` (not bad...) +* regex for matching `1` or `5` => `/(1|5)/` (still easy...) +* regex for matching `1` through `50` => `/([1-9]|[1-4][0-9]|50)/` (uh-oh...) +* regex for matching `1` through `55` => `/([1-9]|[1-4][0-9]|5[0-5])/` (no prob, I can do this...) +* regex for matching `1` through `555` => `/([1-9]|[1-9][0-9]|[1-4][0-9]{2}|5[0-4][0-9]|55[0-5])/` (maybe not...) +* regex for matching `0001` through `5555` => `/(0{3}[1-9]|0{2}[1-9][0-9]|0[1-9][0-9]{2}|[1-4][0-9]{3}|5[0-4][0-9]{2}|55[0-4][0-9]|555[0-5])/` (okay, I get the point!) + +The numbers are contrived, but they're also really basic. In the real world you might need to generate a regex on-the-fly for validation. + +**Learn more** + +If you're interested in learning more about [character classes](http://www.regular-expressions.info/charclass.html) and other regex features, I personally have always found [regular-expressions.info](http://www.regular-expressions.info/charclass.html) to be pretty useful. + +### Heavily tested + +As of April 27, 2017, this library runs [2,783,483 test assertions](./test/test.js) against generated regex-ranges to provide brute-force verification that results are indeed correct. + +Tests run in ~870ms on my MacBook Pro, 2.5 GHz Intel Core i7. + +### Highly optimized + +Generated regular expressions are highly optimized: + +* duplicate sequences and character classes are reduced using quantifiers +* smart enough to use `?` conditionals when number(s) or range(s) can be positive or negative +* uses fragment caching to avoid processing the same exact string more than once + +
+ +
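As a rough sketch of the point above (not from the original readme), the zero-padded `0001` through `5555` case from the list under "Convenience" can be generated rather than hand-written, and then anchored before use:

```js
var toRegexRange = require('to-regex-range');

// Generate the zero-padded '0001' through '5555' pattern instead of
// writing it by hand, then anchor it so only whole matches count.
var re = new RegExp('^(?:' + toRegexRange('0001', '5555') + ')$');

console.log(re.test('0042')); //=> true   (zero-padded and in range)
console.log(re.test('5556')); //=> false  (out of range)
```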
+ +## Usage + +Add this library to your javascript application with the following line of code: + +```js +var toRegexRange = require('to-regex-range'); +``` + +The main export is a function that takes two integers: the `min` value and `max` value (formatted as strings or numbers). + +```js +var source = toRegexRange('15', '95'); +//=> 1[5-9]|[2-8][0-9]|9[0-5] + +var re = new RegExp('^' + source + '$'); +console.log(re.test('14')); //=> false +console.log(re.test('50')); //=> true +console.log(re.test('94')); //=> true +console.log(re.test('96')); //=> false +``` + +## Options + +### options.capture + +**Type**: `boolean` + +**Default**: `undefined` + +Wrap the returned value in parentheses when there is more than one regex condition. Useful when you're dynamically generating ranges. + +```js +console.log(toRegexRange('-10', '10')); +//=> -[1-9]|-?10|[0-9] + +console.log(toRegexRange('-10', '10', {capture: true})); +//=> (-[1-9]|-?10|[0-9]) +``` + +### options.shorthand + +**Type**: `boolean` + +**Default**: `undefined` + +Use the regex shorthand for `[0-9]`: + +```js +console.log(toRegexRange('0', '999999')); +//=> [0-9]|[1-9][0-9]{1,5} + +console.log(toRegexRange('0', '999999', {shorthand: true})); +//=> \d|[1-9]\d{1,5} +``` + +### options.relaxZeros + +**Type**: `boolean` + +**Default**: `true` + +This option only applies to **negative zero-padded ranges**. By default, when a negative zero-padded range is defined, the number of leading zeros is relaxed using `-0*`. + +```js +console.log(toRegexRange('-001', '100')); +//=> -0*1|0{2}[0-9]|0[1-9][0-9]|100 + +console.log(toRegexRange('-001', '100', {relaxZeros: false})); +//=> -0{2}1|0{2}[0-9]|0[1-9][0-9]|100 +``` + +
+**Why are zeros relaxed for negative zero-padded ranges by default?** + +Consider the following. + +```js +var regex = toRegexRange('-001', '100'); +``` + +_Note that `-001` and `100` are both three digits long_. + +In most zero-padding implementations, only a single leading zero is enough to indicate that zero-padding should be applied. Thus, the leading zeros would be "corrected" on the negative range in the example to `-01`, instead of `-001`, to make the total length of each string no greater than the length of the largest number in the range (in other words, `-001` is four characters long, but `100` is only three). + +If zeros were not relaxed by default, you might expect the resulting regex of the above pattern to match `-001` - given that it's defined that way in the arguments - _but it wouldn't_. It would, however, match `-01`. This gets even more ambiguous with large ranges, like `-01` to `1000000`. + +Thus, we relax zeros by default to provide a more predictable experience for users. + +
+ +## Examples + +| **Range** | **Result** | **Compile time** | +| --- | --- | --- | +| `toRegexRange('5, 5')` | `5` | _33μs_ | +| `toRegexRange('5, 6')` | `5\|6` | _53μs_ | +| `toRegexRange('29, 51')` | `29\|[34][0-9]\|5[01]` | _699μs_ | +| `toRegexRange('31, 877')` | `3[1-9]\|[4-9][0-9]\|[1-7][0-9]{2}\|8[0-6][0-9]\|87[0-7]` | _711μs_ | +| `toRegexRange('111, 555')` | `11[1-9]\|1[2-9][0-9]\|[2-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _62μs_ | +| `toRegexRange('-10, 10')` | `-[1-9]\|-?10\|[0-9]` | _74μs_ | +| `toRegexRange('-100, -10')` | `-1[0-9]\|-[2-9][0-9]\|-100` | _49μs_ | +| `toRegexRange('-100, 100')` | `-[1-9]\|-?[1-9][0-9]\|-?100\|[0-9]` | _45μs_ | +| `toRegexRange('001, 100')` | `0{2}[1-9]\|0[1-9][0-9]\|100` | _158μs_ | +| `toRegexRange('0010, 1000')` | `0{2}1[0-9]\|0{2}[2-9][0-9]\|0[1-9][0-9]{2}\|1000` | _61μs_ | +| `toRegexRange('1, 2')` | `1\|2` | _10μs_ | +| `toRegexRange('1, 5')` | `[1-5]` | _24μs_ | +| `toRegexRange('1, 10')` | `[1-9]\|10` | _23μs_ | +| `toRegexRange('1, 100')` | `[1-9]\|[1-9][0-9]\|100` | _30μs_ | +| `toRegexRange('1, 1000')` | `[1-9]\|[1-9][0-9]{1,2}\|1000` | _52μs_ | +| `toRegexRange('1, 10000')` | `[1-9]\|[1-9][0-9]{1,3}\|10000` | _47μs_ | +| `toRegexRange('1, 100000')` | `[1-9]\|[1-9][0-9]{1,4}\|100000` | _44μs_ | +| `toRegexRange('1, 1000000')` | `[1-9]\|[1-9][0-9]{1,5}\|1000000` | _49μs_ | +| `toRegexRange('1, 10000000')` | `[1-9]\|[1-9][0-9]{1,6}\|10000000` | _63μs_ | + +## Heads up! + +**Order of arguments** + +When the `min` is larger than the `max`, values will be flipped to create a valid range: + +```js +toRegexRange('51', '29'); +``` + +Is effectively flipped to: + +```js +toRegexRange('29', '51'); +//=> 29|[3-4][0-9]|5[0-1] +``` + +**Steps / increments** + +This library does not support steps (increments). A pr to add support would be welcome. + +## History + +### v2.0.0 - 2017-04-21 + +**New features** + +Adds support for zero-padding! + +### v1.0.0 + +**Optimizations** + +Repeating ranges are now grouped using quantifiers. rocessing time is roughly the same, but the generated regex is much smaller, which should result in faster matching. + +## Attribution + +Inspired by the python library [range-regex](https://github.com/dimka665/range-regex). + +## About + +### Related projects + +* [expand-range](https://www.npmjs.com/package/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. See… [more](https://github.com/jonschlinkert/expand-range) | [homepage](https://github.com/jonschlinkert/expand-range "Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. See the benchmarks. Used by micromatch.") +* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/jonschlinkert/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") +* [repeat-element](https://www.npmjs.com/package/repeat-element): Create an array by repeating the given value n times. 
| [homepage](https://github.com/jonschlinkert/repeat-element "Create an array by repeating the given value n times.") +* [repeat-string](https://www.npmjs.com/package/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. | [homepage](https://github.com/jonschlinkert/repeat-string "Repeat the given string n times. Fastest implementation for repeating a string.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on April 27, 2017._ \ No newline at end of file diff --git a/node_modules/to-regex-range/index.js b/node_modules/to-regex-range/index.js new file mode 100644 index 00000000..7bb5a743 --- /dev/null +++ b/node_modules/to-regex-range/index.js @@ -0,0 +1,294 @@ +/*! + * to-regex-range + * + * Copyright (c) 2015, 2017, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +var repeat = require('repeat-string'); +var isNumber = require('is-number'); +var cache = {}; + +function toRegexRange(min, max, options) { + if (isNumber(min) === false) { + throw new RangeError('toRegexRange: first argument is invalid.'); + } + + if (typeof max === 'undefined' || min === max) { + return String(min); + } + + if (isNumber(max) === false) { + throw new RangeError('toRegexRange: second argument is invalid.'); + } + + options = options || {}; + var relax = String(options.relaxZeros); + var shorthand = String(options.shorthand); + var capture = String(options.capture); + var key = min + ':' + max + '=' + relax + shorthand + capture; + if (cache.hasOwnProperty(key)) { + return cache[key].result; + } + + var a = Math.min(min, max); + var b = Math.max(min, max); + + if (Math.abs(a - b) === 1) { + var result = min + '|' + max; + if (options.capture) { + return '(' + result + ')'; + } + return result; + } + + var isPadded = padding(min) || padding(max); + var positives = []; + var negatives = []; + + var tok = {min: min, max: max, a: a, b: b}; + if (isPadded) { + tok.isPadded = isPadded; + tok.maxLen = String(tok.max).length; + } + + if (a < 0) { + var newMin = b < 0 ? 
Math.abs(b) : 1; + var newMax = Math.abs(a); + negatives = splitToPatterns(newMin, newMax, tok, options); + a = tok.a = 0; + } + + if (b >= 0) { + positives = splitToPatterns(a, b, tok, options); + } + + tok.negatives = negatives; + tok.positives = positives; + tok.result = siftPatterns(negatives, positives, options); + + if (options.capture && (positives.length + negatives.length) > 1) { + tok.result = '(' + tok.result + ')'; + } + + cache[key] = tok; + return tok.result; +} + +function siftPatterns(neg, pos, options) { + var onlyNegative = filterPatterns(neg, pos, '-', false, options) || []; + var onlyPositive = filterPatterns(pos, neg, '', false, options) || []; + var intersected = filterPatterns(neg, pos, '-?', true, options) || []; + var subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); + return subpatterns.join('|'); +} + +function splitToRanges(min, max) { + min = Number(min); + max = Number(max); + + var nines = 1; + var stops = [max]; + var stop = +countNines(min, nines); + + while (min <= stop && stop <= max) { + stops = push(stops, stop); + nines += 1; + stop = +countNines(min, nines); + } + + var zeros = 1; + stop = countZeros(max + 1, zeros) - 1; + + while (min < stop && stop <= max) { + stops = push(stops, stop); + zeros += 1; + stop = countZeros(max + 1, zeros) - 1; + } + + stops.sort(compare); + return stops; +} + +/** + * Convert a range to a regex pattern + * @param {Number} `start` + * @param {Number} `stop` + * @return {String} + */ + +function rangeToPattern(start, stop, options) { + if (start === stop) { + return {pattern: String(start), digits: []}; + } + + var zipped = zip(String(start), String(stop)); + var len = zipped.length, i = -1; + + var pattern = ''; + var digits = 0; + + while (++i < len) { + var numbers = zipped[i]; + var startDigit = numbers[0]; + var stopDigit = numbers[1]; + + if (startDigit === stopDigit) { + pattern += startDigit; + + } else if (startDigit !== '0' || stopDigit !== '9') { + pattern += toCharacterClass(startDigit, stopDigit); + + } else { + digits += 1; + } + } + + if (digits) { + pattern += options.shorthand ? 
'\\d' : '[0-9]'; + } + + return { pattern: pattern, digits: [digits] }; +} + +function splitToPatterns(min, max, tok, options) { + var ranges = splitToRanges(min, max); + var len = ranges.length; + var idx = -1; + + var tokens = []; + var start = min; + var prev; + + while (++idx < len) { + var range = ranges[idx]; + var obj = rangeToPattern(start, range, options); + var zeros = ''; + + if (!tok.isPadded && prev && prev.pattern === obj.pattern) { + if (prev.digits.length > 1) { + prev.digits.pop(); + } + prev.digits.push(obj.digits[0]); + prev.string = prev.pattern + toQuantifier(prev.digits); + start = range + 1; + continue; + } + + if (tok.isPadded) { + zeros = padZeros(range, tok); + } + + obj.string = zeros + obj.pattern + toQuantifier(obj.digits); + tokens.push(obj); + start = range + 1; + prev = obj; + } + + return tokens; +} + +function filterPatterns(arr, comparison, prefix, intersection, options) { + var res = []; + + for (var i = 0; i < arr.length; i++) { + var tok = arr[i]; + var ele = tok.string; + + if (options.relaxZeros !== false) { + if (prefix === '-' && ele.charAt(0) === '0') { + if (ele.charAt(1) === '{') { + ele = '0*' + ele.replace(/^0\{\d+\}/, ''); + } else { + ele = '0*' + ele.slice(1); + } + } + } + + if (!intersection && !contains(comparison, 'string', ele)) { + res.push(prefix + ele); + } + + if (intersection && contains(comparison, 'string', ele)) { + res.push(prefix + ele); + } + } + return res; +} + +/** + * Zip strings (`for in` can be used on string characters) + */ + +function zip(a, b) { + var arr = []; + for (var ch in a) arr.push([a[ch], b[ch]]); + return arr; +} + +function compare(a, b) { + return a > b ? 1 : b > a ? -1 : 0; +} + +function push(arr, ele) { + if (arr.indexOf(ele) === -1) arr.push(ele); + return arr; +} + +function contains(arr, key, val) { + for (var i = 0; i < arr.length; i++) { + if (arr[i][key] === val) { + return true; + } + } + return false; +} + +function countNines(min, len) { + return String(min).slice(0, -len) + repeat('9', len); +} + +function countZeros(integer, zeros) { + return integer - (integer % Math.pow(10, zeros)); +} + +function toQuantifier(digits) { + var start = digits[0]; + var stop = digits[1] ? (',' + digits[1]) : ''; + if (!stop && (!start || start === 1)) { + return ''; + } + return '{' + start + stop + '}'; +} + +function toCharacterClass(a, b) { + return '[' + a + ((b - a === 1) ? '' : '-') + b + ']'; +} + +function padding(str) { + return /^-?(0+)\d/.exec(str); +} + +function padZeros(val, tok) { + if (tok.isPadded) { + var diff = Math.abs(tok.maxLen - String(val).length); + switch (diff) { + case 0: + return ''; + case 1: + return '0'; + default: { + return '0{' + diff + '}'; + } + } + } + return val; +} + +/** + * Expose `toRegexRange` + */ + +module.exports = toRegexRange; diff --git a/node_modules/to-regex-range/package.json b/node_modules/to-regex-range/package.json new file mode 100644 index 00000000..6c8e6b1c --- /dev/null +++ b/node_modules/to-regex-range/package.json @@ -0,0 +1,86 @@ +{ + "name": "to-regex-range", + "description": "Pass two numbers, get a regex-compatible source string for matching ranges. 
Validated against more than 2.78 million test assertions.", + "version": "2.1.1", + "homepage": "https://github.com/micromatch/to-regex-range", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "micromatch/to-regex-range", + "bugs": { + "url": "https://github.com/micromatch/to-regex-range/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "devDependencies": { + "fill-range": "^3.1.1", + "gulp-format-md": "^0.1.12", + "mocha": "^3.2.0", + "text-table": "^0.2.0", + "time-diff": "^0.3.1" + }, + "keywords": [ + "alpha", + "alphabetical", + "bash", + "brace", + "date", + "expand", + "expansion", + "glob", + "match", + "matches", + "matching", + "number", + "numerical", + "range", + "ranges", + "regex", + "sequence", + "sh", + "to", + "year" + ], + "verb": { + "related": { + "list": [ + "expand-range", + "fill-range", + "micromatch", + "repeat-element", + "repeat-string" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "helpers": [ + "./examples.js" + ], + "reflinks": [ + "0-5", + "0-9", + "1-5", + "1-9" + ] + } +} diff --git a/node_modules/to-regex/LICENSE b/node_modules/to-regex/LICENSE new file mode 100644 index 00000000..7c9987bc --- /dev/null +++ b/node_modules/to-regex/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016-2018, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/to-regex/README.md b/node_modules/to-regex/README.md new file mode 100644 index 00000000..5a28fc33 --- /dev/null +++ b/node_modules/to-regex/README.md @@ -0,0 +1,205 @@ +# to-regex [![NPM version](https://img.shields.io/npm/v/to-regex.svg?style=flat)](https://www.npmjs.com/package/to-regex) [![NPM monthly downloads](https://img.shields.io/npm/dm/to-regex.svg?style=flat)](https://npmjs.org/package/to-regex) [![NPM total downloads](https://img.shields.io/npm/dt/to-regex.svg?style=flat)](https://npmjs.org/package/to-regex) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/to-regex.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/to-regex) + +> Generate a regex from a string or array of strings. 
+ +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +- [Install](#install) +- [Usage](#usage) +- [Options](#options) + * [options.contains](#optionscontains) + * [options.negate](#optionsnegate) + * [options.nocase](#optionsnocase) + * [options.flags](#optionsflags) + * [options.cache](#optionscache) + * [options.safe](#optionssafe) +- [About](#about) + * [Related projects](#related-projects) + * [Author](#author) + * [License](#license) + +_(TOC generated by [verb](https://github.com/verbose/verb) using [markdown-toc](https://github.com/jonschlinkert/markdown-toc))_ + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save to-regex +``` + +## Usage + +```js +var toRegex = require('to-regex'); + +console.log(toRegex('foo')); +//=> /^(?:foo)$/ + +console.log(toRegex('foo', {negate: true})); +//=> /^(?:(?:(?!^(?:foo)$).)*)$/ + +console.log(toRegex('foo', {contains: true})); +//=> /(?:foo)/ + +console.log(toRegex(['foo', 'bar'], {negate: true})); +//=> /^(?:(?:(?!^(?:(?:foo)|(?:bar))$).)*)$/ + +console.log(toRegex(['foo', 'bar'], {negate: true, contains: true})); +//=> /^(?:(?:(?!(?:(?:foo)|(?:bar))).)*)$/ +``` + +## Options + +### options.contains + +**Type**: `Boolean` + +**Default**: `undefined` + +Generate a regex that will match any string that _contains_ the given pattern. By default, regex is strict will only return true for exact matches. + +```js +var toRegex = require('to-regex'); +console.log(toRegex('foo', {contains: true})); +//=> /(?:foo)/ +``` + +### options.negate + +**Type**: `Boolean` + +**Default**: `undefined` + +Create a regex that will match everything except the given pattern. + +```js +var toRegex = require('to-regex'); +console.log(toRegex('foo', {negate: true})); +//=> /^(?:(?:(?!^(?:foo)$).)*)$/ +``` + +### options.nocase + +**Type**: `Boolean` + +**Default**: `undefined` + +Adds the `i` flag, to enable case-insensitive matching. + +```js +var toRegex = require('to-regex'); +console.log(toRegex('foo', {nocase: true})); +//=> /^(?:foo)$/i +``` + +Alternatively you can pass the flags you want directly on [options.flags](#options.flags). + +### options.flags + +**Type**: `String` + +**Default**: `undefined` + +Define the flags you want to use on the generated regex. + +```js +var toRegex = require('to-regex'); +console.log(toRegex('foo', {flags: 'gm'})); +//=> /^(?:foo)$/gm +console.log(toRegex('foo', {flags: 'gmi', nocase: true})); //<= handles redundancy +//=> /^(?:foo)$/gmi +``` + +### options.cache + +**Type**: `Boolean` + +**Default**: `true` + +Generated regex is cached based on the provided string and options. As a result, runtime compilation only happens once per pattern (as long as options are also the same), which can result in dramatic speed improvements. + +This also helps with debugging, since adding options and pattern are added to the generated regex. + +**Disable caching** + +```js +toRegex('foo', {cache: false}); +``` + +### options.safe + +**Type**: `Boolean` + +**Default**: `undefined` + +Check the generated regular expression with [safe-regex](https://github.com/substack/safe-regex) and throw an error if the regex is potentially unsafe. + +**Examples** + +```js +console.log(toRegex('(x+x+)+y')); +//=> /^(?:(x+x+)+y)$/ + +// The following would throw an error +toRegex('(x+x+)+y', {safe: true}); +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
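A couple of behaviours that follow from the bundled `index.js` (shown after this readme) rather than from any single example above, offered here as a sketch: options can be combined, and because results are memoized, the inputs used to build a regex are attached to the returned regex.

```js
var toRegex = require('to-regex');

// Options combine: `contains` drops the ^...$ anchors and `nocase` adds
// the `i` flag; an array of patterns is joined with `|` before compiling.
console.log(toRegex(['foo', 'bar'], { nocase: true, contains: true }));
//=> /(?:foo|bar)/i

// With caching enabled (the default), the pattern and options used to
// build the regex are stored on it as non-enumerable properties,
// which helps when debugging.
var re = toRegex('foo', { nocase: true });
console.log(re.pattern); //=> 'foo'
console.log(re.options); //=> { nocase: true }
```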
+ +### Related projects + +You might also be interested in these projects: + +* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob "Returns `true` if an array has a glob pattern.") +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [path-regex](https://www.npmjs.com/package/path-regex): Regular expression for matching the parts of a file path. | [homepage](https://github.com/regexps/path-regex "Regular expression for matching the parts of a file path.") +* [to-regex-range](https://www.npmjs.com/package/to-regex-range): Pass two numbers, get a regex-compatible source string for matching ranges. Validated against more than… [more](https://github.com/micromatch/to-regex-range) | [homepage](https://github.com/micromatch/to-regex-range "Pass two numbers, get a regex-compatible source string for matching ranges. Validated against more than 2.78 million test assertions.") + +### Author + +**Jon Schlinkert** + +* [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on February 24, 2018._ \ No newline at end of file diff --git a/node_modules/to-regex/index.js b/node_modules/to-regex/index.js new file mode 100644 index 00000000..a87d0159 --- /dev/null +++ b/node_modules/to-regex/index.js @@ -0,0 +1,155 @@ +'use strict'; + +var safe = require('safe-regex'); +var define = require('define-property'); +var extend = require('extend-shallow'); +var not = require('regex-not'); +var MAX_LENGTH = 1024 * 64; + +/** + * Session cache + */ + +var cache = {}; + +/** + * Create a regular expression from the given `pattern` string. + * + * @param {String|RegExp} `pattern` Pattern can be a string or regular expression. + * @param {Object} `options` + * @return {RegExp} + * @api public + */ + +module.exports = function(patterns, options) { + if (!Array.isArray(patterns)) { + return makeRe(patterns, options); + } + return makeRe(patterns.join('|'), options); +}; + +/** + * Create a regular expression from the given `pattern` string. + * + * @param {String|RegExp} `pattern` Pattern can be a string or regular expression. 
+ * @param {Object} `options` + * @return {RegExp} + * @api public + */ + +function makeRe(pattern, options) { + if (pattern instanceof RegExp) { + return pattern; + } + + if (typeof pattern !== 'string') { + throw new TypeError('expected a string'); + } + + if (pattern.length > MAX_LENGTH) { + throw new Error('expected pattern to be less than ' + MAX_LENGTH + ' characters'); + } + + var key = pattern; + // do this before shallow cloning options, it's a lot faster + if (!options || (options && options.cache !== false)) { + key = createKey(pattern, options); + + if (cache.hasOwnProperty(key)) { + return cache[key]; + } + } + + var opts = extend({}, options); + if (opts.contains === true) { + if (opts.negate === true) { + opts.strictNegate = false; + } else { + opts.strict = false; + } + } + + if (opts.strict === false) { + opts.strictOpen = false; + opts.strictClose = false; + } + + var open = opts.strictOpen !== false ? '^' : ''; + var close = opts.strictClose !== false ? '$' : ''; + var flags = opts.flags || ''; + var regex; + + if (opts.nocase === true && !/i/.test(flags)) { + flags += 'i'; + } + + try { + if (opts.negate || typeof opts.strictNegate === 'boolean') { + pattern = not.create(pattern, opts); + } + + var str = open + '(?:' + pattern + ')' + close; + regex = new RegExp(str, flags); + + if (opts.safe === true && safe(regex) === false) { + throw new Error('potentially unsafe regular expression: ' + regex.source); + } + + } catch (err) { + if (opts.strictErrors === true || opts.safe === true) { + err.key = key; + err.pattern = pattern; + err.originalOptions = options; + err.createdOptions = opts; + throw err; + } + + try { + regex = new RegExp('^' + pattern.replace(/(\W)/g, '\\$1') + '$'); + } catch (err) { + regex = /.^/; //<= match nothing + } + } + + if (opts.cache !== false) { + memoize(regex, key, pattern, opts); + } + return regex; +} + +/** + * Memoize generated regex. This can result in dramatic speed improvements + * and simplify debugging by adding options and pattern to the regex. It can be + * disabled by passing setting `options.cache` to false. + */ + +function memoize(regex, key, pattern, options) { + define(regex, 'cached', true); + define(regex, 'pattern', pattern); + define(regex, 'options', options); + define(regex, 'key', key); + cache[key] = regex; +} + +/** + * Create the key to use for memoization. The key is generated + * by iterating over the options and concatenating key-value pairs + * to the pattern string. + */ + +function createKey(pattern, options) { + if (!options) return pattern; + var key = pattern; + for (var prop in options) { + if (options.hasOwnProperty(prop)) { + key += ';' + prop + '=' + String(options[prop]); + } + } + return key; +} + +/** + * Expose `makeRe` + */ + +module.exports.makeRe = makeRe; diff --git a/node_modules/to-regex/node_modules/define-property/CHANGELOG.md b/node_modules/to-regex/node_modules/define-property/CHANGELOG.md new file mode 100644 index 00000000..901c8aae --- /dev/null +++ b/node_modules/to-regex/node_modules/define-property/CHANGELOG.md @@ -0,0 +1,82 @@ +# Release history + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+ Guiding Principles + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each versions is displayed. +- Mention whether you follow Semantic Versioning. + +
+ +
+ Types of changes + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/)_): + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities. + +
+ +## [2.0.0] - 2017-04-20 + +### Changed + +- Now supports data descriptors in addition to accessor descriptors. +- Now uses [Reflect.defineProperty][reflect] when available, otherwise falls back to [Object.defineProperty][object]. + +## [1.0.0] - 2017-04-20 + +- stable release + +## [0.2.5] - 2015-08-31 + +- use is-descriptor + +## [0.2.3] - 2015-08-29 + +- check keys length + +## [0.2.2] - 2015-08-27 + +- ensure val is an object + +## [0.2.1] - 2015-08-27 + +- support functions + +## [0.2.0] - 2015-08-27 + +- support get/set +- update docs + +## [0.1.0] - 2015-08-12 + +- first commit + +[2.0.0]: https://github.com/jonschlinkert/define-property/compare/1.0.0...2.0.0 +[1.0.0]: https://github.com/jonschlinkert/define-property/compare/0.2.5...1.0.0 +[0.2.5]: https://github.com/jonschlinkert/define-property/compare/0.2.3...0.2.5 +[0.2.3]: https://github.com/jonschlinkert/define-property/compare/0.2.2...0.2.3 +[0.2.2]: https://github.com/jonschlinkert/define-property/compare/0.2.1...0.2.2 +[0.2.1]: https://github.com/jonschlinkert/define-property/compare/0.2.0...0.2.1 +[0.2.0]: https://github.com/jonschlinkert/define-property/compare/0.1.3...0.2.0 + +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog + +[object]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/defineProperty +[reflect]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Reflect/defineProperty diff --git a/node_modules/to-regex/node_modules/define-property/LICENSE b/node_modules/to-regex/node_modules/define-property/LICENSE new file mode 100644 index 00000000..f8de0630 --- /dev/null +++ b/node_modules/to-regex/node_modules/define-property/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2018, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/to-regex/node_modules/define-property/README.md b/node_modules/to-regex/node_modules/define-property/README.md new file mode 100644 index 00000000..f1ee8f92 --- /dev/null +++ b/node_modules/to-regex/node_modules/define-property/README.md @@ -0,0 +1,117 @@ +# define-property [![NPM version](https://img.shields.io/npm/v/define-property.svg?style=flat)](https://www.npmjs.com/package/define-property) [![NPM monthly downloads](https://img.shields.io/npm/dm/define-property.svg?style=flat)](https://npmjs.org/package/define-property) [![NPM total downloads](https://img.shields.io/npm/dt/define-property.svg?style=flat)](https://npmjs.org/package/define-property) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/define-property.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/define-property) + +> Define a non-enumerable property on an object. Uses Reflect.defineProperty when available, otherwise Object.defineProperty. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save define-property +``` + +## Release history + +See [the CHANGELOG](changelog.md) for updates. + +## Usage + +**Params** + +* `object`: The object on which to define the property. +* `key`: The name of the property to be defined or modified. +* `value`: The value or descriptor of the property being defined or modified. + +```js +var define = require('define-property'); +var obj = {}; +define(obj, 'foo', function(val) { + return val.toUpperCase(); +}); + +// by default, defined properties are non-enumberable +console.log(obj); +//=> {} + +console.log(obj.foo('bar')); +//=> 'BAR' +``` + +**defining setters/getters** + +Pass the same properties you would if using [Object.defineProperty](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/defineProperty) or [Reflect.defineProperty](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Reflect/defineProperty). + +```js +define(obj, 'foo', { + set: function() {}, + get: function() {} +}); +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
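One case the Usage section above does not show: per the 2.0.0 changelog entry, full data descriptors are accepted as well as accessor descriptors. A minimal sketch (derived from the bundled `index.js`, not an official readme example):

```js
var define = require('define-property');
var obj = {};

// A complete data descriptor is passed straight through to
// Reflect.defineProperty / Object.defineProperty, so `enumerable: true`
// overrides the usual non-enumerable default.
define(obj, 'foo', {
  value: 'bar',
  enumerable: true,
  writable: false,
  configurable: false
});

console.log(obj.foo);          //=> 'bar'
console.log(Object.keys(obj)); //=> [ 'foo' ]
```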
+ +### Related projects + +You might also be interested in these projects: + +* [assign-deep](https://www.npmjs.com/package/assign-deep): Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target… [more](https://github.com/jonschlinkert/assign-deep) | [homepage](https://github.com/jonschlinkert/assign-deep "Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target (first) object.") +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow "Extend an object with the properties of additional objects. node.js/javascript util.") +* [merge-deep](https://www.npmjs.com/package/merge-deep): Recursively merge values in a javascript object. | [homepage](https://github.com/jonschlinkert/merge-deep "Recursively merge values in a javascript object.") +* [mixin-deep](https://www.npmjs.com/package/mixin-deep): Deeply mix the properties of objects into the first object. Like merge-deep, but doesn't clone. | [homepage](https://github.com/jonschlinkert/mixin-deep "Deeply mix the properties of objects into the first object. Like merge-deep, but doesn't clone.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 28 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [doowb](https://github.com/doowb) | + +### Author + +**Jon Schlinkert** + +* Connect with me on [linkedin/in/jonschlinkert](https://linkedin.com/in/jonschlinkert) +* Follow me on [github/jonschlinkert](https://github.com/jonschlinkert) +* Follow me on [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on January 25, 2018._ \ No newline at end of file diff --git a/node_modules/to-regex/node_modules/define-property/index.js b/node_modules/to-regex/node_modules/define-property/index.js new file mode 100644 index 00000000..0efa0a9e --- /dev/null +++ b/node_modules/to-regex/node_modules/define-property/index.js @@ -0,0 +1,38 @@ +/*! + * define-property + * + * Copyright (c) 2015-2018, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +var isobject = require('isobject'); +var isDescriptor = require('is-descriptor'); +var define = (typeof Reflect !== 'undefined' && Reflect.defineProperty) + ? Reflect.defineProperty + : Object.defineProperty; + +module.exports = function defineProperty(obj, key, val) { + if (!isobject(obj) && typeof obj !== 'function' && !Array.isArray(obj)) { + throw new TypeError('expected an object, function, or array'); + } + + if (typeof key !== 'string') { + throw new TypeError('expected "key" to be a string'); + } + + if (isDescriptor(val)) { + define(obj, key, val); + return obj; + } + + define(obj, key, { + configurable: true, + enumerable: false, + writable: true, + value: val + }); + + return obj; +}; diff --git a/node_modules/to-regex/node_modules/define-property/package.json b/node_modules/to-regex/node_modules/define-property/package.json new file mode 100644 index 00000000..f8fd21cb --- /dev/null +++ b/node_modules/to-regex/node_modules/define-property/package.json @@ -0,0 +1,67 @@ +{ + "name": "define-property", + "description": "Define a non-enumerable property on an object. 
Uses Reflect.defineProperty when available, otherwise Object.defineProperty.", + "version": "2.0.2", + "homepage": "https://github.com/jonschlinkert/define-property", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "jonschlinkert/define-property", + "bugs": { + "url": "https://github.com/jonschlinkert/define-property/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-descriptor": "^1.0.2", + "isobject": "^3.0.1" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "define", + "define-property", + "enumerable", + "key", + "non", + "non-enumerable", + "object", + "prop", + "property", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "assign-deep", + "extend-shallow", + "merge-deep", + "mixin-deep" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/to-regex/node_modules/extend-shallow/LICENSE b/node_modules/to-regex/node_modules/extend-shallow/LICENSE new file mode 100644 index 00000000..99c93691 --- /dev/null +++ b/node_modules/to-regex/node_modules/extend-shallow/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, 2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/to-regex/node_modules/extend-shallow/README.md b/node_modules/to-regex/node_modules/extend-shallow/README.md new file mode 100644 index 00000000..dee226f4 --- /dev/null +++ b/node_modules/to-regex/node_modules/extend-shallow/README.md @@ -0,0 +1,97 @@ +# extend-shallow [![NPM version](https://img.shields.io/npm/v/extend-shallow.svg?style=flat)](https://www.npmjs.com/package/extend-shallow) [![NPM monthly downloads](https://img.shields.io/npm/dm/extend-shallow.svg?style=flat)](https://npmjs.org/package/extend-shallow) [![NPM total downloads](https://img.shields.io/npm/dt/extend-shallow.svg?style=flat)](https://npmjs.org/package/extend-shallow) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/extend-shallow.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/extend-shallow) + +> Extend an object with the properties of additional objects. node.js/javascript util. 
+ +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save extend-shallow +``` + +## Usage + +```js +var extend = require('extend-shallow'); + +extend({a: 'b'}, {c: 'd'}) +//=> {a: 'b', c: 'd'} +``` + +Pass an empty object to shallow clone: + +```js +var obj = {}; +extend(obj, {a: 'b'}, {c: 'd'}) +//=> {a: 'b', c: 'd'} +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
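Two details that the Usage section above implies but does not spell out, shown here as a sketch (not from the original readme): the first argument is mutated and returned, and later sources win on conflicting keys.

```js
var extend = require('extend-shallow');

// The target is extended in place and returned; later sources overwrite
// earlier ones on conflicting keys (a shallow merge, like Object.assign).
var target = { a: 1, b: 1 };
var result = extend(target, { b: 2 }, { c: 3 });

console.log(result);            //=> { a: 1, b: 2, c: 3 }
console.log(result === target); //=> true
```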
+ +### Related projects + +You might also be interested in these projects: + +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow "Extend an object with the properties of additional objects. node.js/javascript util.") +* [for-in](https://www.npmjs.com/package/for-in): Iterate over the own and inherited enumerable properties of an object, and return an object… [more](https://github.com/jonschlinkert/for-in) | [homepage](https://github.com/jonschlinkert/for-in "Iterate over the own and inherited enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js") +* [for-own](https://www.npmjs.com/package/for-own): Iterate over the own enumerable properties of an object, and return an object with properties… [more](https://github.com/jonschlinkert/for-own) | [homepage](https://github.com/jonschlinkert/for-own "Iterate over the own enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 33 | [jonschlinkert](https://github.com/jonschlinkert) | +| 1 | [pdehaan](https://github.com/pdehaan) | + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). 
+ +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on November 19, 2017._ \ No newline at end of file diff --git a/node_modules/to-regex/node_modules/extend-shallow/index.js b/node_modules/to-regex/node_modules/extend-shallow/index.js new file mode 100644 index 00000000..c9582f8f --- /dev/null +++ b/node_modules/to-regex/node_modules/extend-shallow/index.js @@ -0,0 +1,60 @@ +'use strict'; + +var isExtendable = require('is-extendable'); +var assignSymbols = require('assign-symbols'); + +module.exports = Object.assign || function(obj/*, objects*/) { + if (obj === null || typeof obj === 'undefined') { + throw new TypeError('Cannot convert undefined or null to object'); + } + if (!isObject(obj)) { + obj = {}; + } + for (var i = 1; i < arguments.length; i++) { + var val = arguments[i]; + if (isString(val)) { + val = toObject(val); + } + if (isObject(val)) { + assign(obj, val); + assignSymbols(obj, val); + } + } + return obj; +}; + +function assign(a, b) { + for (var key in b) { + if (hasOwn(b, key)) { + a[key] = b[key]; + } + } +} + +function isString(val) { + return (val && typeof val === 'string'); +} + +function toObject(str) { + var obj = {}; + for (var i in str) { + obj[i] = str[i]; + } + return obj; +} + +function isObject(val) { + return (val && typeof val === 'object') || isExtendable(val); +} + +/** + * Returns true if the given `key` is an own property of `obj`. + */ + +function hasOwn(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +function isEnum(obj, key) { + return Object.prototype.propertyIsEnumerable.call(obj, key); +} diff --git a/node_modules/to-regex/node_modules/extend-shallow/package.json b/node_modules/to-regex/node_modules/extend-shallow/package.json new file mode 100644 index 00000000..e5e91053 --- /dev/null +++ b/node_modules/to-regex/node_modules/extend-shallow/package.json @@ -0,0 +1,83 @@ +{ + "name": "extend-shallow", + "description": "Extend an object with the properties of additional objects. 
node.js/javascript util.", + "version": "3.0.2", + "homepage": "https://github.com/jonschlinkert/extend-shallow", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Peter deHaan (http://about.me/peterdehaan)" + ], + "repository": "jonschlinkert/extend-shallow", + "bugs": { + "url": "https://github.com/jonschlinkert/extend-shallow/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "devDependencies": { + "array-slice": "^1.0.0", + "benchmarked": "^2.0.0", + "for-own": "^1.0.0", + "gulp-format-md": "^1.0.0", + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.1", + "minimist": "^1.2.0", + "mocha": "^3.5.3", + "object-assign": "^4.1.1" + }, + "keywords": [ + "assign", + "clone", + "extend", + "merge", + "obj", + "object", + "object-assign", + "object.assign", + "prop", + "properties", + "property", + "props", + "shallow", + "util", + "utility", + "utils", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "related": { + "list": [ + "extend-shallow", + "for-in", + "for-own", + "is-plain-object", + "isobject", + "kind-of" + ] + }, + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/to-regex/node_modules/is-extendable/LICENSE b/node_modules/to-regex/node_modules/is-extendable/LICENSE new file mode 100644 index 00000000..c0d7f136 --- /dev/null +++ b/node_modules/to-regex/node_modules/is-extendable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/to-regex/node_modules/is-extendable/README.md b/node_modules/to-regex/node_modules/is-extendable/README.md new file mode 100644 index 00000000..875b56a7 --- /dev/null +++ b/node_modules/to-regex/node_modules/is-extendable/README.md @@ -0,0 +1,88 @@ +# is-extendable [![NPM version](https://img.shields.io/npm/v/is-extendable.svg?style=flat)](https://www.npmjs.com/package/is-extendable) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![NPM total downloads](https://img.shields.io/npm/dt/is-extendable.svg?style=flat)](https://npmjs.org/package/is-extendable) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-extendable.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-extendable) + +> Returns true if a value is a plain object, array or function. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-extendable +``` + +## Usage + +```js +var isExtendable = require('is-extendable'); +``` + +Returns true if the value is any of the following: + +* array +* plain object +* function + +## Notes + +All objects in JavaScript can have keys, but it's a pain to check for this, since we ether need to verify that the value is not `null` or `undefined` and: + +* the value is not a primitive, or +* that the object is a plain object, function or array + +Also note that an `extendable` object is not the same as an [extensible object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/isExtensible), which is one that (in es6) is not sealed, frozen, or marked as non-extensible using `preventExtensions`. + +## Release history + +### v1.0.0 - 2017/07/20 + +**Breaking changes** + +* No longer considers date, regex or error objects to be extendable + +## About + +### Related projects + +* [assign-deep](https://www.npmjs.com/package/assign-deep): Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target… [more](https://github.com/jonschlinkert/assign-deep) | [homepage](https://github.com/jonschlinkert/assign-deep "Deeply assign the enumerable properties and/or es6 Symbol properies of source objects to the target (first) object.") +* [is-equal-shallow](https://www.npmjs.com/package/is-equal-shallow): Does a shallow comparison of two objects, returning false if the keys or values differ. | [homepage](https://github.com/jonschlinkert/is-equal-shallow "Does a shallow comparison of two objects, returning false if the keys or values differ.") +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). 
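Taken together, the Usage, Notes and Release history sections above boil down to behaviour like the following sketch (outputs inferred from the one-line implementation included later in this patch, not copied from the readme):

```js
var isExtendable = require('is-extendable');

console.log(isExtendable({}));             //=> true  (plain object)
console.log(isExtendable([]));             //=> true  (array)
console.log(isExtendable(function () {})); //=> true  (function)

console.log(isExtendable('foo'));          //=> false (primitive)
console.log(isExtendable(null));           //=> false
console.log(isExtendable(new Date()));     //=> false (dates are no longer extendable as of v1.0.0)
```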
+ +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on July 20, 2017._ \ No newline at end of file diff --git a/node_modules/to-regex/node_modules/is-extendable/index.d.ts b/node_modules/to-regex/node_modules/is-extendable/index.d.ts new file mode 100644 index 00000000..b96d5075 --- /dev/null +++ b/node_modules/to-regex/node_modules/is-extendable/index.d.ts @@ -0,0 +1,5 @@ +export = isExtendable; + +declare function isExtendable(val: any): boolean; + +declare namespace isExtendable {} diff --git a/node_modules/to-regex/node_modules/is-extendable/index.js b/node_modules/to-regex/node_modules/is-extendable/index.js new file mode 100644 index 00000000..a8b26ad0 --- /dev/null +++ b/node_modules/to-regex/node_modules/is-extendable/index.js @@ -0,0 +1,14 @@ +/*! + * is-extendable + * + * Copyright (c) 2015-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +var isPlainObject = require('is-plain-object'); + +module.exports = function isExtendable(val) { + return isPlainObject(val) || typeof val === 'function' || Array.isArray(val); +}; diff --git a/node_modules/to-regex/node_modules/is-extendable/package.json b/node_modules/to-regex/node_modules/is-extendable/package.json new file mode 100644 index 00000000..2aaab65a --- /dev/null +++ b/node_modules/to-regex/node_modules/is-extendable/package.json @@ -0,0 +1,67 @@ +{ + "name": "is-extendable", + "description": "Returns true if a value is a plain object, array or function.", + "version": "1.0.1", + "homepage": "https://github.com/jonschlinkert/is-extendable", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extendable", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extendable/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "index.d.ts" + ], + "main": "index.js", + "types": "index.d.ts", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.4.2" + }, + "keywords": [ + "array", + "assign", + "check", + "date", + "extend", + "extendable", + "extensible", + "function", + "is", + "object", + "regex", + "test" + ], + "verb": { + "related": { + "list": [ + "assign-deep", + "is-equal-shallow", + "is-plain-object", + "isobject", + "kind-of" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/to-regex/package.json b/node_modules/to-regex/package.json new file mode 100644 index 00000000..e4277d31 --- /dev/null +++ b/node_modules/to-regex/package.json @@ -0,0 +1,62 @@ +{ + "name": "to-regex", + "description": "Generate a regex from a string or array of strings.", + "version": "3.0.2", + "homepage": "https://github.com/jonschlinkert/to-regex", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/to-regex", + "bugs": { + "url": "https://github.com/jonschlinkert/to-regex/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "regex-not": "^1.0.2", + "safe-regex": "^1.1.0" + }, + "devDependencies": { + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "match", + "regex", + "regular expression", + "test", + "to" + ], + "verb": { + "toc": { + "method": "preWrite" + }, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "has-glob", + "is-glob", + "path-regex", + "to-regex-range" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/toidentifier/LICENSE b/node_modules/toidentifier/LICENSE new file mode 100644 index 00000000..de22d159 --- /dev/null +++ b/node_modules/toidentifier/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and 
to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/toidentifier/README.md b/node_modules/toidentifier/README.md new file mode 100644 index 00000000..7c8794e2 --- /dev/null +++ b/node_modules/toidentifier/README.md @@ -0,0 +1,61 @@ +# toidentifier + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][codecov-image]][codecov-url] + +> Convert a string of words to a JavaScript identifier + +## Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```bash +$ npm install toidentifier +``` + +## Example + +```js +var toIdentifier = require('toidentifier') + +console.log(toIdentifier('Bad Request')) +// => "BadRequest" +``` + +## API + +This CommonJS module exports a single default function: `toIdentifier`. + +### toIdentifier(string) + +Given a string as the argument, it will be transformed according to +the following rules and the new string will be returned: + +1. Split into words separated by space characters (`0x20`). +2. Upper case the first character of each word. +3. Join the words together with no separator. +4. Remove all non-word (`[0-9a-z_]`) characters. + +## License + +[MIT](LICENSE) + +[codecov-image]: https://img.shields.io/codecov/c/github/component/toidentifier.svg +[codecov-url]: https://codecov.io/gh/component/toidentifier +[downloads-image]: https://img.shields.io/npm/dm/toidentifier.svg +[downloads-url]: https://npmjs.org/package/toidentifier +[npm-image]: https://img.shields.io/npm/v/toidentifier.svg +[npm-url]: https://npmjs.org/package/toidentifier +[travis-image]: https://img.shields.io/travis/component/toidentifier/master.svg +[travis-url]: https://travis-ci.org/component/toidentifier + + +## + +[npm]: https://www.npmjs.com/ + +[yarn]: https://yarnpkg.com/ diff --git a/node_modules/toidentifier/index.js b/node_modules/toidentifier/index.js new file mode 100644 index 00000000..bba54114 --- /dev/null +++ b/node_modules/toidentifier/index.js @@ -0,0 +1,30 @@ +/*! + * toidentifier + * Copyright(c) 2016 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * Module exports. 
+ * @public + */ + +module.exports = toIdentifier + +/** + * Trasform the given string into a JavaScript identifier + * + * @param {string} str + * @returns {string} + * @public + */ + +function toIdentifier (str) { + return str + .split(' ') + .map(function (token) { + return token.slice(0, 1).toUpperCase() + token.slice(1) + }) + .join('') + .replace(/[^ _0-9a-z]/gi, '') +} diff --git a/node_modules/toidentifier/package.json b/node_modules/toidentifier/package.json new file mode 100644 index 00000000..3f78165a --- /dev/null +++ b/node_modules/toidentifier/package.json @@ -0,0 +1,34 @@ +{ + "name": "toidentifier", + "description": "Convert a string of words to a JavaScript identifier", + "version": "1.0.0", + "author": "Douglas Christopher Wilson ", + "contributors": [ + "Douglas Christopher Wilson ", + "Nick Baugh (http://niftylettuce.com/)" + ], + "repository": "component/toidentifier", + "devDependencies": { + "eslint": "4.19.1", + "eslint-config-standard": "11.0.0", + "eslint-plugin-import": "2.11.0", + "eslint-plugin-markdown": "1.0.0-beta.6", + "eslint-plugin-node": "6.0.1", + "eslint-plugin-promise": "3.7.0", + "eslint-plugin-standard": "3.1.0", + "mocha": "1.21.5", + "nyc": "11.8.0" + }, + "engines": { + "node": ">=0.6" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/node_modules/touch/LICENSE b/node_modules/touch/LICENSE new file mode 100644 index 00000000..05eeeb88 --- /dev/null +++ b/node_modules/touch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/touch/README.md b/node_modules/touch/README.md new file mode 100644 index 00000000..b5a361e6 --- /dev/null +++ b/node_modules/touch/README.md @@ -0,0 +1,52 @@ +# node-touch + +For all your node touching needs. + +## Installing + +```bash +npm install touch +``` + +## CLI Usage: + +See `man touch` + +This package exports a binary called `nodetouch` that works mostly +like the unix builtin `touch(1)`. + +## API Usage: + +```javascript +var touch = require("touch") +``` + +Gives you the following functions: + +* `touch(filename, options, cb)` +* `touch.sync(filename, options)` +* `touch.ftouch(fd, options, cb)` +* `touch.ftouchSync(fd, options)` + +All the `options` objects are optional. + +All the async functions return a Promise. If a callback function is +provided, then it's attached to the Promise. + +## Options + +* `force` like `touch -f` Boolean +* `time` like `touch -t ` Can be a Date object, or any parseable + Date string, or epoch ms number. +* `atime` like `touch -a` Can be either a Boolean, or a Date. 
+* `mtime` like `touch -m` Can be either a Boolean, or a Date. +* `ref` like `touch -r ` Must be path to a file. +* `nocreate` like `touch -c` Boolean + +If neither `atime` nor `mtime` are set, then both values are set. If +one of them is set, then the other is not. + +## cli + +This package creates a `nodetouch` command line executable that works +very much like the unix builtin `touch(1)` diff --git a/node_modules/touch/bin/nodetouch.js b/node_modules/touch/bin/nodetouch.js new file mode 100755 index 00000000..f78f0829 --- /dev/null +++ b/node_modules/touch/bin/nodetouch.js @@ -0,0 +1,112 @@ +#!/usr/bin/env node +const touch = require("../index.js") + +const usage = code => { + console[code ? 'error' : 'log']( + 'usage:\n' + + 'touch [-acfm] [-r file] [-t [[CC]YY]MMDDhhmm[.SS]] file ...' + ) + process.exit(code) +} + +const singleFlags = { + a: 'atime', + m: 'mtime', + c: 'nocreate', + f: 'force' +} + +const singleOpts = { + r: 'ref', + t: 'time' +} + +const files = [] +const args = process.argv.slice(2) +const options = {} +for (let i = 0; i < args.length; i++) { + const arg = args[i] + if (!arg.match(/^-/)) { + files.push(arg) + continue + } + + // expand shorthands + if (arg.charAt(1) !== '-') { + const expand = [] + for (let f = 1; f < arg.length; f++) { + const fc = arg.charAt(f) + const sf = singleFlags[fc] + const so = singleOpts[fc] + if (sf) + expand.push('--' + sf) + else if (so) { + const soslice = arg.slice(f + 1) + const soval = soslice.charAt(0) === '=' ? soslice : '=' + soslice + expand.push('--' + so + soval) + f = arg.length + } else if (arg !== '-' + fc) + expand.push('-' + fc) + } + if (expand.length) { + args.splice.apply(args, [i, 1].concat(expand)) + i-- + continue + } + } + + const argsplit = arg.split('=') + const key = argsplit.shift().replace(/^\-\-/, '') + const val = argsplit.length ? argsplit.join('=') : null + + switch (key) { + case 'time': + const timestr = val || args[++i] + // [-t [[CC]YY]MMDDhhmm[.SS]] + const parsedtime = timestr.match( + /^(([0-9]{2})?([0-9]{2}))?([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})(\.([0-9]{2}))?$/ + ) + if (!parsedtime) { + console.error('touch: out of range or illegal ' + + 'time specification: ' + + '[[CC]YY]MMDDhhmm[.SS]') + process.exit(1) + } else { + const y = +parsedtime[1] + const year = parsedtime[2] ? y + : y <= 68 ? 2000 + y + : 1900 + y + + const MM = +parsedtime[4] - 1 + const dd = +parsedtime[5] + const hh = +parsedtime[6] + const mm = +parsedtime[7] + const ss = +parsedtime[8] + + options.time = new Date(Date.UTC(year, MM, dd, hh, mm, ss)) + } + continue + + case 'ref': + options.ref = val || args[++i] + continue + + case 'mtime': + case 'nocreate': + case 'atime': + case 'force': + options[key] = true + continue + + default: + console.error('touch: illegal option -- ' + arg) + usage(1) + } +} + +if (!files.length) + usage() + +process.exitCode = 0 +Promise.all(files.map(f => touch(f, options))) + .catch(er => process.exitCode = 1) diff --git a/node_modules/touch/index.js b/node_modules/touch/index.js new file mode 100644 index 00000000..f942e42a --- /dev/null +++ b/node_modules/touch/index.js @@ -0,0 +1,224 @@ +'use strict' + +const EE = require('events').EventEmitter +const cons = require('constants') +const fs = require('fs') + +module.exports = (f, options, cb) => { + if (typeof options === 'function') + cb = options, options = {} + + const p = new Promise((res, rej) => { + new Touch(validOpts(options, f, null)) + .on('done', res).on('error', rej) + }) + + return cb ? 
p.then(res => cb(null, res), cb) : p +} + +module.exports.sync = module.exports.touchSync = (f, options) => + (new TouchSync(validOpts(options, f, null)), undefined) + +module.exports.ftouch = (fd, options, cb) => { + if (typeof options === 'function') + cb = options, options = {} + + const p = new Promise((res, rej) => { + new Touch(validOpts(options, null, fd)) + .on('done', res).on('error', rej) + }) + + return cb ? p.then(res => cb(null, res), cb) : p +} + +module.exports.ftouchSync = (fd, opt) => + (new TouchSync(validOpts(opt, null, fd)), undefined) + +const validOpts = (options, path, fd) => { + options = Object.create(options || {}) + options.fd = fd + options.path = path + + // {mtime: true}, {ctime: true} + // If set to something else, then treat as epoch ms value + const now = parseInt(new Date(options.time || Date.now()).getTime() / 1000) + if (!options.atime && !options.mtime) + options.atime = options.mtime = now + else { + if (true === options.atime) + options.atime = now + + if (true === options.mtime) + options.mtime = now + } + + let oflags = 0 + if (!options.force) + oflags = oflags | cons.O_RDWR + + if (!options.nocreate) + oflags = oflags | cons.O_CREAT + + options.oflags = oflags + return options +} + +class Touch extends EE { + constructor (options) { + super(options) + this.fd = options.fd + this.path = options.path + this.atime = options.atime + this.mtime = options.mtime + this.ref = options.ref + this.nocreate = !!options.nocreate + this.force = !!options.force + this.closeAfter = options.closeAfter + this.oflags = options.oflags + this.options = options + + if (typeof this.fd !== 'number') { + this.closeAfter = true + this.open() + } else + this.onopen(null, this.fd) + } + + emit (ev, data) { + // we only emit when either done or erroring + // in both cases, need to close + this.close() + return super.emit(ev, data) + } + + close () { + if (typeof this.fd === 'number' && this.closeAfter) + fs.close(this.fd, () => {}) + } + + open () { + fs.open(this.path, this.oflags, (er, fd) => this.onopen(er, fd)) + } + + onopen (er, fd) { + if (er) { + if (er.code === 'EISDIR') + this.onopen(null, null) + else if (er.code === 'ENOENT' && this.nocreate) + this.emit('done') + else + this.emit('error', er) + } else { + this.fd = fd + if (this.ref) + this.statref() + else if (!this.atime || !this.mtime) + this.fstat() + else + this.futimes() + } + } + + statref () { + fs.stat(this.ref, (er, st) => { + if (er) + this.emit('error', er) + else + this.onstatref(st) + }) + } + + onstatref (st) { + this.atime = this.atime && parseInt(st.atime.getTime()/1000, 10) + this.mtime = this.mtime && parseInt(st.mtime.getTime()/1000, 10) + if (!this.atime || !this.mtime) + this.fstat() + else + this.futimes() + } + + fstat () { + const stat = this.fd ? 'fstat' : 'stat' + const target = this.fd || this.path + fs[stat](target, (er, st) => { + if (er) + this.emit('error', er) + else + this.onfstat(st) + }) + } + + onfstat (st) { + if (typeof this.atime !== 'number') + this.atime = parseInt(st.atime.getTime()/1000, 10) + + if (typeof this.mtime !== 'number') + this.mtime = parseInt(st.mtime.getTime()/1000, 10) + + this.futimes() + } + + futimes () { + const utimes = this.fd ? 
'futimes' : 'utimes' + const target = this.fd || this.path + fs[utimes](target, ''+this.atime, ''+this.mtime, er => { + if (er) + this.emit('error', er) + else + this.emit('done') + }) + } +} + +class TouchSync extends Touch { + open () { + try { + this.onopen(null, fs.openSync(this.path, this.oflags)) + } catch (er) { + this.onopen(er) + } + } + + statref () { + let threw = true + try { + this.onstatref(fs.statSync(this.ref)) + threw = false + } finally { + if (threw) + this.close() + } + } + + fstat () { + let threw = true + const stat = this.fd ? 'fstatSync' : 'statSync' + const target = this.fd || this.path + try { + this.onfstat(fs[stat](target)) + threw = false + } finally { + if (threw) + this.close() + } + } + + futimes () { + let threw = true + const utimes = this.fd ? 'futimesSync' : 'utimesSync' + const target = this.fd || this.path + try { + fs[utimes](target, this.atime, this.mtime) + threw = false + } finally { + if (threw) + this.close() + } + this.emit('done') + } + + close () { + if (typeof this.fd === 'number' && this.closeAfter) + try { fs.closeSync(this.fd) } catch (er) {} + } +} diff --git a/node_modules/touch/node_modules/.bin/nopt b/node_modules/touch/node_modules/.bin/nopt new file mode 120000 index 00000000..d6493b6a --- /dev/null +++ b/node_modules/touch/node_modules/.bin/nopt @@ -0,0 +1 @@ +../../../nopt/bin/nopt.js \ No newline at end of file diff --git a/node_modules/touch/package.json b/node_modules/touch/package.json new file mode 100644 index 00000000..05608de3 --- /dev/null +++ b/node_modules/touch/package.json @@ -0,0 +1,28 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "touch", + "description": "like touch(1) in node", + "version": "3.1.0", + "repository": "git://github.com/isaacs/node-touch.git", + "bin": { + "nodetouch": "./bin/nodetouch.js" + }, + "dependencies": { + "nopt": "~1.0.10" + }, + "license": "ISC", + "scripts": { + "test": "tap test/*.js --100 -J", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "devDependencies": { + "mutate-fs": "^1.1.0", + "tap": "^10.7.0" + }, + "files": [ + "index.js", + "bin/nodetouch.js" + ] +} diff --git a/node_modules/type-is/HISTORY.md b/node_modules/type-is/HISTORY.md new file mode 100644 index 00000000..8de21f7a --- /dev/null +++ b/node_modules/type-is/HISTORY.md @@ -0,0 +1,259 @@ +1.6.18 / 2019-04-26 +=================== + + * Fix regression passing request object to `typeis.is` + +1.6.17 / 2019-04-25 +=================== + + * deps: mime-types@~2.1.24 + - Add Apple file extensions from IANA + - Add extension `.csl` to `application/vnd.citationstyles.style+xml` + - Add extension `.es` to `application/ecmascript` + - Add extension `.nq` to `application/n-quads` + - Add extension `.nt` to `application/n-triples` + - Add extension `.owl` to `application/rdf+xml` + - Add extensions `.siv` and `.sieve` to `application/sieve` + - Add extensions from IANA for `image/*` types + - Add extensions from IANA for `model/*` types + - Add extensions to HEIC image types + - Add new mime types + - Add `text/mdx` with extension `.mdx` + * perf: prevent internal `throw` on invalid type + +1.6.16 / 2018-02-16 +=================== + + * deps: mime-types@~2.1.18 + - Add `application/raml+yaml` with extension `.raml` + - Add `application/wasm` with extension `.wasm` + - Add `text/shex` with extension `.shex` + - Add extensions for JPEG-2000 images + - Add extensions from IANA for `message/*` types + - Add extension `.mjs` to 
`application/javascript` + - Add extension `.wadl` to `application/vnd.sun.wadl+xml` + - Add extension `.gz` to `application/gzip` + - Add glTF types and extensions + - Add new mime types + - Update extensions `.md` and `.markdown` to be `text/markdown` + - Update font MIME types + - Update `text/hjson` to registered `application/hjson` + +1.6.15 / 2017-03-31 +=================== + + * deps: mime-types@~2.1.15 + - Add new mime types + +1.6.14 / 2016-11-18 +=================== + + * deps: mime-types@~2.1.13 + - Add new mime types + +1.6.13 / 2016-05-18 +=================== + + * deps: mime-types@~2.1.11 + - Add new mime types + +1.6.12 / 2016-02-28 +=================== + + * deps: mime-types@~2.1.10 + - Add new mime types + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + +1.6.11 / 2016-01-29 +=================== + + * deps: mime-types@~2.1.9 + - Add new mime types + +1.6.10 / 2015-12-01 +=================== + + * deps: mime-types@~2.1.8 + - Add new mime types + +1.6.9 / 2015-09-27 +================== + + * deps: mime-types@~2.1.7 + - Add new mime types + +1.6.8 / 2015-09-04 +================== + + * deps: mime-types@~2.1.6 + - Add new mime types + +1.6.7 / 2015-08-20 +================== + + * Fix type error when given invalid type to match against + * deps: mime-types@~2.1.5 + - Add new mime types + +1.6.6 / 2015-07-31 +================== + + * deps: mime-types@~2.1.4 + - Add new mime types + +1.6.5 / 2015-07-16 +================== + + * deps: mime-types@~2.1.3 + - Add new mime types + +1.6.4 / 2015-07-01 +================== + + * deps: mime-types@~2.1.2 + - Add new mime types + * perf: enable strict mode + * perf: remove argument reassignment + +1.6.3 / 2015-06-08 +================== + + * deps: mime-types@~2.1.1 + - Add new mime types + * perf: reduce try block size + * perf: remove bitwise operations + +1.6.2 / 2015-05-10 +================== + + * deps: mime-types@~2.0.11 + - Add new mime types + +1.6.1 / 2015-03-13 +================== + + * deps: mime-types@~2.0.10 + - Add new mime types + +1.6.0 / 2015-02-12 +================== + + * fix false-positives in `hasBody` `Transfer-Encoding` check + * support wildcard for both type and subtype (`*/*`) + +1.5.7 / 2015-02-09 +================== + + * fix argument reassignment + * deps: mime-types@~2.0.9 + - Add new mime types + +1.5.6 / 2015-01-29 +================== + + * deps: mime-types@~2.0.8 + - Add new mime types + +1.5.5 / 2014-12-30 +================== + + * deps: mime-types@~2.0.7 + - Add new mime types + - Fix missing extensions + - Fix various invalid MIME type entries + - Remove example template MIME types + - deps: mime-db@~1.5.0 + +1.5.4 / 2014-12-10 +================== + + * deps: mime-types@~2.0.4 + - Add new mime types + - deps: mime-db@~1.3.0 + +1.5.3 / 2014-11-09 +================== + + * deps: mime-types@~2.0.3 + - Add new mime types + - deps: mime-db@~1.2.0 + +1.5.2 / 2014-09-28 +================== + + * deps: mime-types@~2.0.2 + - Add new mime types + - deps: mime-db@~1.1.0 + +1.5.1 / 2014-09-07 +================== + + * Support Node.js 0.6 + * deps: media-typer@0.3.0 + * deps: mime-types@~2.0.1 + - Support Node.js 0.6 + +1.5.0 / 2014-09-05 +================== + + * fix `hasbody` to be true for `content-length: 0` + +1.4.0 / 2014-09-02 +================== + + * update mime-types + +1.3.2 / 2014-06-24 +================== + + * use `~` range on mime-types + +1.3.1 / 2014-06-19 +================== + + * fix global variable leak + +1.3.0 / 2014-06-19 +================== + + * improve 
type parsing + + - invalid media type never matches + - media type not case-sensitive + - extra LWS does not affect results + +1.2.2 / 2014-06-19 +================== + + * fix behavior on unknown type argument + +1.2.1 / 2014-06-03 +================== + + * switch dependency from `mime` to `mime-types@1.0.0` + +1.2.0 / 2014-05-11 +================== + + * support suffix matching: + + - `+json` matches `application/vnd+json` + - `*/vnd+json` matches `application/vnd+json` + - `application/*+json` matches `application/vnd+json` + +1.1.0 / 2014-04-12 +================== + + * add non-array values support + * expose internal utilities: + + - `.is()` + - `.hasBody()` + - `.normalize()` + - `.match()` + +1.0.1 / 2014-03-30 +================== + + * add `multipart` as a shorthand diff --git a/node_modules/type-is/LICENSE b/node_modules/type-is/LICENSE new file mode 100644 index 00000000..386b7b69 --- /dev/null +++ b/node_modules/type-is/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/type-is/README.md b/node_modules/type-is/README.md new file mode 100644 index 00000000..b85ef8f7 --- /dev/null +++ b/node_modules/type-is/README.md @@ -0,0 +1,170 @@ +# type-is + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Infer the content-type of a request. + +### Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install type-is +``` + +## API + +```js +var http = require('http') +var typeis = require('type-is') + +http.createServer(function (req, res) { + var istext = typeis(req, ['text/*']) + res.end('you ' + (istext ? 'sent' : 'did not send') + ' me text') +}) +``` + +### typeis(request, types) + +Checks if the `request` is one of the `types`. If the request has no body, +even if there is a `Content-Type` header, then `null` is returned. If the +`Content-Type` header is invalid or does not matches any of the `types`, then +`false` is returned. Otherwise, a string of the type that matched is returned. 
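To make the three possible return shapes (`null`, `false`, or the matched type string) concrete, a small sketch follows; the literal objects are stand-ins for real Node.js requests, since `type-is` only inspects `req.headers`:

```js
var typeis = require('type-is')

// a request with a body and 'Content-Type: application/json'
var req = { headers: { 'content-type': 'application/json', 'content-length': '52' } }

typeis(req, ['json', 'html'])  // => 'json'
typeis(req, ['html'])          // => false (header present, but no match)

// a request without a body: no transfer-encoding and no content-length,
// so the result is null even though a content-type header is set
var empty = { headers: { 'content-type': 'application/json' } }

typeis(empty, ['json'])        // => null
```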
+ +The `request` argument is expected to be a Node.js HTTP request. The `types` +argument is an array of type strings. + +Each type in the `types` array can be one of the following: + +- A file extension name such as `json`. This name will be returned if matched. +- A mime type such as `application/json`. +- A mime type with a wildcard such as `*/*` or `*/json` or `application/*`. + The full mime type will be returned if matched. +- A suffix such as `+json`. This can be combined with a wildcard such as + `*/vnd+json` or `application/*+json`. The full mime type will be returned + if matched. + +Some examples to illustrate the inputs and returned value: + + + +```js +// req.headers.content-type = 'application/json' + +typeis(req, ['json']) // => 'json' +typeis(req, ['html', 'json']) // => 'json' +typeis(req, ['application/*']) // => 'application/json' +typeis(req, ['application/json']) // => 'application/json' + +typeis(req, ['html']) // => false +``` + +### typeis.hasBody(request) + +Returns a Boolean if the given `request` has a body, regardless of the +`Content-Type` header. + +Having a body has no relation to how large the body is (it may be 0 bytes). +This is similar to how file existence works. If a body does exist, then this +indicates that there is data to read from the Node.js request stream. + + + +```js +if (typeis.hasBody(req)) { + // read the body, since there is one + + req.on('data', function (chunk) { + // ... + }) +} +``` + +### typeis.is(mediaType, types) + +Checks if the `mediaType` is one of the `types`. If the `mediaType` is invalid +or does not matches any of the `types`, then `false` is returned. Otherwise, a +string of the type that matched is returned. + +The `mediaType` argument is expected to be a +[media type](https://tools.ietf.org/html/rfc6838) string. The `types` argument +is an array of type strings. + +Each type in the `types` array can be one of the following: + +- A file extension name such as `json`. This name will be returned if matched. +- A mime type such as `application/json`. +- A mime type with a wildcard such as `*/*` or `*/json` or `application/*`. + The full mime type will be returned if matched. +- A suffix such as `+json`. This can be combined with a wildcard such as + `*/vnd+json` or `application/*+json`. The full mime type will be returned + if matched. 
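The wildcard and `+suffix` forms in the list above are resolved by two helpers the module also exposes, `typeis.normalize()` and `typeis.match()` (both visible in `index.js` later in this patch); a brief sketch of what they return, ahead of the usual examples below:

```js
var typeis = require('type-is')

typeis.normalize('json')        // => 'application/json'
typeis.normalize('urlencoded')  // => 'application/x-www-form-urlencoded'
typeis.normalize('+json')       // => '*/*+json'

typeis.match('text/*', 'text/html')                   // => true
typeis.match('*/*+json', 'application/vnd.api+json')  // => true
typeis.match('application/json', 'text/html')         // => false
```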
+ +Some examples to illustrate the inputs and returned value: + + + +```js +var mediaType = 'application/json' + +typeis.is(mediaType, ['json']) // => 'json' +typeis.is(mediaType, ['html', 'json']) // => 'json' +typeis.is(mediaType, ['application/*']) // => 'application/json' +typeis.is(mediaType, ['application/json']) // => 'application/json' + +typeis.is(mediaType, ['html']) // => false +``` + +## Examples + +### Example body parser + +```js +var express = require('express') +var typeis = require('type-is') + +var app = express() + +app.use(function bodyParser (req, res, next) { + if (!typeis.hasBody(req)) { + return next() + } + + switch (typeis(req, ['urlencoded', 'json', 'multipart'])) { + case 'urlencoded': + // parse urlencoded body + throw new Error('implement urlencoded body parsing') + case 'json': + // parse json body + throw new Error('implement json body parsing') + case 'multipart': + // parse multipart body + throw new Error('implement multipart body parsing') + default: + // 415 error code + res.statusCode = 415 + res.end() + break + } +}) +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/type-is/master +[coveralls-url]: https://coveralls.io/r/jshttp/type-is?branch=master +[node-version-image]: https://badgen.net/npm/node/type-is +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/type-is +[npm-url]: https://npmjs.org/package/type-is +[npm-version-image]: https://badgen.net/npm/v/type-is +[travis-image]: https://badgen.net/travis/jshttp/type-is/master +[travis-url]: https://travis-ci.org/jshttp/type-is diff --git a/node_modules/type-is/index.js b/node_modules/type-is/index.js new file mode 100644 index 00000000..890ad76c --- /dev/null +++ b/node_modules/type-is/index.js @@ -0,0 +1,266 @@ +/*! + * type-is + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var typer = require('media-typer') +var mime = require('mime-types') + +/** + * Module exports. + * @public + */ + +module.exports = typeofrequest +module.exports.is = typeis +module.exports.hasBody = hasbody +module.exports.normalize = normalize +module.exports.match = mimeMatch + +/** + * Compare a `value` content-type with `types`. + * Each `type` can be an extension like `html`, + * a special shortcut like `multipart` or `urlencoded`, + * or a mime type. + * + * If no types match, `false` is returned. + * Otherwise, the first `type` that matches is returned. + * + * @param {String} value + * @param {Array} types + * @public + */ + +function typeis (value, types_) { + var i + var types = types_ + + // remove parameters and normalize + var val = tryNormalizeType(value) + + // no type or invalid + if (!val) { + return false + } + + // support flattened arguments + if (types && !Array.isArray(types)) { + types = new Array(arguments.length - 1) + for (i = 0; i < types.length; i++) { + types[i] = arguments[i + 1] + } + } + + // no types, return the content type + if (!types || !types.length) { + return val + } + + var type + for (i = 0; i < types.length; i++) { + if (mimeMatch(normalize(type = types[i]), val)) { + return type[0] === '+' || type.indexOf('*') !== -1 + ? val + : type + } + } + + // no matches + return false +} + +/** + * Check if a request has a request body. + * A request with a body __must__ either have `transfer-encoding` + * or `content-length` headers set. 
+ * http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3 + * + * @param {Object} request + * @return {Boolean} + * @public + */ + +function hasbody (req) { + return req.headers['transfer-encoding'] !== undefined || + !isNaN(req.headers['content-length']) +} + +/** + * Check if the incoming request contains the "Content-Type" + * header field, and it contains any of the give mime `type`s. + * If there is no request body, `null` is returned. + * If there is no content type, `false` is returned. + * Otherwise, it returns the first `type` that matches. + * + * Examples: + * + * // With Content-Type: text/html; charset=utf-8 + * this.is('html'); // => 'html' + * this.is('text/html'); // => 'text/html' + * this.is('text/*', 'application/json'); // => 'text/html' + * + * // When Content-Type is application/json + * this.is('json', 'urlencoded'); // => 'json' + * this.is('application/json'); // => 'application/json' + * this.is('html', 'application/*'); // => 'application/json' + * + * this.is('html'); // => false + * + * @param {String|Array} types... + * @return {String|false|null} + * @public + */ + +function typeofrequest (req, types_) { + var types = types_ + + // no body + if (!hasbody(req)) { + return null + } + + // support flattened arguments + if (arguments.length > 2) { + types = new Array(arguments.length - 1) + for (var i = 0; i < types.length; i++) { + types[i] = arguments[i + 1] + } + } + + // request content type + var value = req.headers['content-type'] + + return typeis(value, types) +} + +/** + * Normalize a mime type. + * If it's a shorthand, expand it to a valid mime type. + * + * In general, you probably want: + * + * var type = is(req, ['urlencoded', 'json', 'multipart']); + * + * Then use the appropriate body parsers. + * These three are the most common request body types + * and are thus ensured to work. + * + * @param {String} type + * @private + */ + +function normalize (type) { + if (typeof type !== 'string') { + // invalid type + return false + } + + switch (type) { + case 'urlencoded': + return 'application/x-www-form-urlencoded' + case 'multipart': + return 'multipart/*' + } + + if (type[0] === '+') { + // "+json" -> "*/*+json" expando + return '*/*' + type + } + + return type.indexOf('/') === -1 + ? mime.lookup(type) + : type +} + +/** + * Check if `expected` mime type + * matches `actual` mime type with + * wildcard and +suffix support. + * + * @param {String} expected + * @param {String} actual + * @return {Boolean} + * @private + */ + +function mimeMatch (expected, actual) { + // invalid type + if (expected === false) { + return false + } + + // split types + var actualParts = actual.split('/') + var expectedParts = expected.split('/') + + // invalid format + if (actualParts.length !== 2 || expectedParts.length !== 2) { + return false + } + + // validate type + if (expectedParts[0] !== '*' && expectedParts[0] !== actualParts[0]) { + return false + } + + // validate suffix wildcard + if (expectedParts[1].substr(0, 2) === '*+') { + return expectedParts[1].length <= actualParts[1].length + 1 && + expectedParts[1].substr(1) === actualParts[1].substr(1 - expectedParts[1].length) + } + + // validate subtype + if (expectedParts[1] !== '*' && expectedParts[1] !== actualParts[1]) { + return false + } + + return true +} + +/** + * Normalize a type and remove parameters. 
+ * + * @param {string} value + * @return {string} + * @private + */ + +function normalizeType (value) { + // parse the type + var type = typer.parse(value) + + // remove the parameters + type.parameters = undefined + + // reformat it + return typer.format(type) +} + +/** + * Try to normalize a type and remove parameters. + * + * @param {string} value + * @return {string} + * @private + */ + +function tryNormalizeType (value) { + if (!value) { + return null + } + + try { + return normalizeType(value) + } catch (err) { + return null + } +} diff --git a/node_modules/type-is/package.json b/node_modules/type-is/package.json new file mode 100644 index 00000000..97ba5f14 --- /dev/null +++ b/node_modules/type-is/package.json @@ -0,0 +1,45 @@ +{ + "name": "type-is", + "description": "Infer the content-type of a request.", + "version": "1.6.18", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "jshttp/type-is", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "devDependencies": { + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.17.2", + "eslint-plugin-markdown": "1.0.0", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "mocha": "6.1.4", + "nyc": "14.0.0" + }, + "engines": { + "node": ">= 0.6" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "test-travis": "nyc --reporter=text npm test" + }, + "keywords": [ + "content", + "type", + "checking" + ] +} diff --git a/node_modules/undefsafe/.jscsrc b/node_modules/undefsafe/.jscsrc new file mode 100644 index 00000000..9e01c9be --- /dev/null +++ b/node_modules/undefsafe/.jscsrc @@ -0,0 +1,13 @@ +{ + "preset": "node-style-guide", + "requireCapitalizedComments": null, + "requireSpacesInAnonymousFunctionExpression": { + "beforeOpeningCurlyBrace": true, + "beforeOpeningRoundBrace": true + }, + "disallowSpacesInNamedFunctionExpression": { + "beforeOpeningRoundBrace": true + }, + "excludeFiles": ["node_modules/**"], + "disallowSpacesInFunction": null +} diff --git a/node_modules/undefsafe/.jshintrc b/node_modules/undefsafe/.jshintrc new file mode 100644 index 00000000..b47f672f --- /dev/null +++ b/node_modules/undefsafe/.jshintrc @@ -0,0 +1,16 @@ +{ + "browser": false, + "camelcase": true, + "curly": true, + "devel": true, + "eqeqeq": true, + "forin": true, + "indent": 2, + "noarg": true, + "node": true, + "quotmark": "single", + "undef": true, + "strict": false, + "unused": true +} + diff --git a/node_modules/undefsafe/.npmignore b/node_modules/undefsafe/.npmignore new file mode 100644 index 00000000..5f6a848d --- /dev/null +++ b/node_modules/undefsafe/.npmignore @@ -0,0 +1,2 @@ +# .npmignore file +test/ diff --git a/node_modules/undefsafe/.travis.yml b/node_modules/undefsafe/.travis.yml new file mode 100644 index 00000000..a1ace24a --- /dev/null +++ b/node_modules/undefsafe/.travis.yml @@ -0,0 +1,18 @@ +sudo: false +language: node_js +cache: + directories: + - node_modules +notifications: + email: false +node_js: + - '4' +before_install: + - npm i -g npm@^2.0.0 +before_script: + - npm prune +after_success: + - npm run semantic-release +branches: + except: + - "/^v\\d+\\.\\d+\\.\\d+$/" diff --git a/node_modules/undefsafe/LICENSE 
b/node_modules/undefsafe/LICENSE new file mode 100644 index 00000000..caaf03ae --- /dev/null +++ b/node_modules/undefsafe/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright © 2016 Remy Sharp, http://remysharp.com + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/undefsafe/README.md b/node_modules/undefsafe/README.md new file mode 100644 index 00000000..46a706bc --- /dev/null +++ b/node_modules/undefsafe/README.md @@ -0,0 +1,63 @@ +# undefsafe + +Simple *function* for retrieving deep object properties without getting "Cannot read property 'X' of undefined" + +Can also be used to safely set deep values. + +## Usage + +```js +var object = { + a: { + b: { + c: 1, + d: [1,2,3], + e: 'remy' + } + } +}; + +console.log(undefsafe(object, 'a.b.e')); // "remy" +console.log(undefsafe(object, 'a.b.not.found')); // undefined +``` + +Demo: [https://jsbin.com/eroqame/3/edit?js,console](https://jsbin.com/eroqame/3/edit?js,console) + +## Setting + +```js +var object = { + a: { + b: [1,2,3] + } +}; + +// modified object +var res = undefsafe(object, 'a.b.0', 10); + +console.log(object); // { a: { b: [10, 2, 3] } } +console.log(res); // 1 - previous value +``` + +## Star rules in paths + +As of 1.2.0, `undefsafe` supports a `*` in the path if you want to search all of the properties (or array elements) for a particular element. + +The function will only return a single result, either the 3rd argument validation value, or the first positive match. For example, the following github data: + +```js +const githubData = { + commits: [{ + modified: [ + "one", + "two" + ] + }, /* ... 
*/ ] + }; + +// first modified file found in the first commit +console.log(undefsafe(githubData, 'commits.*.modified.0')); + +// returns `two` or undefined if not found +console.log(undefsafe(githubData, 'commits.*.modified.*', 'two')); +``` diff --git a/node_modules/undefsafe/example.js b/node_modules/undefsafe/example.js new file mode 100644 index 00000000..922d8b9e --- /dev/null +++ b/node_modules/undefsafe/example.js @@ -0,0 +1,14 @@ +var undefsafe = require('undefsafe'); + +var object = { + a: { + b: { + c: 1, + d: [1,2,3], + e: 'remy' + } + } +}; + +console.log(undefsafe(object, 'a.b.e')); // "remy" +console.log(undefsafe(object, 'a.b.not.found')); // undefined diff --git a/node_modules/undefsafe/lib/undefsafe.js b/node_modules/undefsafe/lib/undefsafe.js new file mode 100644 index 00000000..0cfb5fda --- /dev/null +++ b/node_modules/undefsafe/lib/undefsafe.js @@ -0,0 +1,113 @@ +'use strict'; + +function undefsafe(obj, path, value, __res) { + + // I'm not super keen on this private function, but it's because + // it'll also be use in the browser and I wont *one* function exposed + function split(path) { + var res = []; + var level = 0; + var key = ''; + + for (var i = 0; i < path.length; i++) { + var c = path.substr(i, 1); + + if (level === 0 && (c === '.' || c === '[')) { + if (c === '[') { + level++; + i++; + c = path.substr(i, 1); + } + + if (key) { // the first value could be a string + res.push(key); + } + key = ''; + continue; + } + + if (c === ']') { + level--; + key = key.slice(0, -1); + continue; + } + + key += c; + } + + res.push(key); + + return res; + } + + // bail if there's nothing + if (obj === undefined || obj === null) { + return undefined; + } + + var parts = split(path); + var key = null; + var type = typeof obj; + var root = obj; + var parent = obj; + + var star = parts.filter(function (_) { return _ === '*' }).length > 0; + + // we're dealing with a primative + if (type !== 'object' && type !== 'function') { + return obj; + } else if (path.trim() === '') { + return obj; + } + + key = parts[0]; + var i = 0; + for (; i < parts.length; i++) { + key = parts[i]; + parent = obj; + + if (key === '*') { + // loop through each property + var prop = ''; + var res = __res || []; + + for (prop in parent) { + var shallowObj = undefsafe(obj[prop], parts.slice(i + 1).join('.'), value, res); + if (shallowObj && shallowObj !== res) { + if ((value && shallowObj === value) || (value === undefined)) { + if (value !== undefined) { + return shallowObj; + } + + res.push(shallowObj); + } + } + } + + if (res.length === 0) { + return undefined; + } + + return res; + } + + obj = obj[key]; + if (obj === undefined || obj === null) { + break; + } + } + + // if we have a null object, make sure it's the one the user was after, + // if it's not (i.e. parts has a length) then give undefined back. 
+ if (obj === null && i !== parts.length - 1) { + obj = undefined; + } else if (!star && value) { + key = path.split('.').pop(); + parent[key] = value; + } + return obj; +} + +if (typeof module !== 'undefined') { + module.exports = undefsafe; +} diff --git a/node_modules/undefsafe/package.json b/node_modules/undefsafe/package.json new file mode 100644 index 00000000..37568e8e --- /dev/null +++ b/node_modules/undefsafe/package.json @@ -0,0 +1,32 @@ +{ + "name": "undefsafe", + "description": "Undefined safe way of extracting object properties", + "main": "lib/undefsafe.js", + "tonicExampleFilename": "example.js", + "directories": { + "test": "test" + }, + "scripts": { + "test": "tap test/**/*.test.js -R spec", + "cover": "tap test/*.test.js --cov --coverage-report=lcov", + "semantic-release": "semantic-release pre && npm publish && semantic-release post" + }, + "repository": { + "type": "git", + "url": "https://github.com/remy/undefsafe.git" + }, + "keywords": [ + "undefined" + ], + "author": "Remy Sharp", + "license": "MIT", + "devDependencies": { + "semantic-release": "^4.3.5", + "tap": "^5.7.1", + "tap-only": "0.0.5" + }, + "dependencies": { + "debug": "^2.2.0" + }, + "version": "2.0.2" +} \ No newline at end of file diff --git a/node_modules/union-value/LICENSE b/node_modules/union-value/LICENSE new file mode 100644 index 00000000..83b56e70 --- /dev/null +++ b/node_modules/union-value/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/union-value/README.md b/node_modules/union-value/README.md new file mode 100644 index 00000000..1a90ffbd --- /dev/null +++ b/node_modules/union-value/README.md @@ -0,0 +1,73 @@ +# union-value [![NPM version](https://img.shields.io/npm/v/union-value.svg?style=flat)](https://www.npmjs.com/package/union-value) [![NPM monthly downloads](https://img.shields.io/npm/dm/union-value.svg?style=flat)](https://npmjs.org/package/union-value) [![NPM total downloads](https://img.shields.io/npm/dt/union-value.svg?style=flat)](https://npmjs.org/package/union-value) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/union-value.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/union-value) + +> Set an array of unique values as the property of an object. Supports setting deeply nested properties using using object-paths/dot notation. 
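The "unique" part of the description above means that values already present under the target path are not added again; a small sketch, in addition to the Usage section below:

```js
var union = require('union-value');

var obj = {};

union(obj, 'a.b.c', ['one', 'two']);
union(obj, 'a.b.c', ['two', 'three']); // 'two' is already there, so only 'three' is appended

console.log(obj);
//=> {a: {b: {c: [ 'one', 'two', 'three' ] }}}
```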
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save union-value +``` + +## Usage + +```js +var union = require('union-value'); + +var obj = {}; + +union(obj, 'a.b.c', ['one', 'two']); +union(obj, 'a.b.c', ['three']); + +console.log(obj); +//=> {a: {b: {c: [ 'one', 'two', 'three' ] }}} +``` + +## About + +### Related projects + +* [assign-value](https://www.npmjs.com/package/assign-value): Assign a value or extend a deeply nested property of an object using object path… [more](https://github.com/jonschlinkert/assign-value) | [homepage](https://github.com/jonschlinkert/assign-value "Assign a value or extend a deeply nested property of an object using object path notation.") +* [get-value](https://www.npmjs.com/package/get-value): Use property paths (`a.b.c`) to get a nested value from an object. | [homepage](https://github.com/jonschlinkert/get-value "Use property paths (`a.b.c`) to get a nested value from an object.") +* [has-value](https://www.npmjs.com/package/has-value): Returns true if a value exists, false if empty. Works with deeply nested values using… [more](https://github.com/jonschlinkert/has-value) | [homepage](https://github.com/jonschlinkert/has-value "Returns true if a value exists, false if empty. Works with deeply nested values using object paths.") +* [set-value](https://www.npmjs.com/package/set-value): Create nested values and any intermediaries using dot notation (`'a.b.c'`) paths. | [homepage](https://github.com/jonschlinkert/set-value "Create nested values and any intermediaries using dot notation (`'a.b.c'`) paths.") +* [unset-value](https://www.npmjs.com/package/unset-value): Delete nested properties from an object using dot notation. | [homepage](https://github.com/jonschlinkert/unset-value "Delete nested properties from an object using dot notation.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). 
+ +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.4.2, on February 25, 2017._ \ No newline at end of file diff --git a/node_modules/union-value/index.js b/node_modules/union-value/index.js new file mode 100644 index 00000000..9c5d8db2 --- /dev/null +++ b/node_modules/union-value/index.js @@ -0,0 +1,30 @@ +'use strict'; + +var isObject = require('is-extendable'); +var union = require('arr-union'); +var get = require('get-value'); +var set = require('set-value'); + +module.exports = function unionValue(obj, prop, value) { + if (!isObject(obj)) { + throw new TypeError('union-value expects the first argument to be an object.'); + } + + if (typeof prop !== 'string') { + throw new TypeError('union-value expects `prop` to be a string.'); + } + + var arr = arrayify(get(obj, prop)); + set(obj, prop, union(arr, arrayify(value))); + return obj; +}; + +function arrayify(val) { + if (val === null || typeof val === 'undefined') { + return []; + } + if (Array.isArray(val)) { + return val; + } + return [val]; +} diff --git a/node_modules/union-value/package.json b/node_modules/union-value/package.json new file mode 100644 index 00000000..65441f6e --- /dev/null +++ b/node_modules/union-value/package.json @@ -0,0 +1,70 @@ +{ + "name": "union-value", + "description": "Set an array of unique values as the property of an object. Supports setting deeply nested properties using using object-paths/dot notation.", + "version": "1.0.1", + "homepage": "https://github.com/jonschlinkert/union-value", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/union-value", + "bugs": { + "url": "https://github.com/jonschlinkert/union-value/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "arr-union": "^3.1.0", + "get-value": "^2.0.6", + "is-extendable": "^0.1.1", + "set-value": "^2.0.1" + }, + "devDependencies": { + "gulp-format-md": "^0.1.11", + "mocha": "^3.2.0", + "should": "^11.2.0" + }, + "keywords": [ + "array", + "dot", + "get", + "has", + "nested", + "notation", + "object", + "path", + "prop", + "property", + "set", + "union", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "assign-value", + "get-value", + "has-value", + "set-value", + "unset-value" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/unique-string/index.js b/node_modules/unique-string/index.js new file mode 100644 index 00000000..5bc7787f --- /dev/null +++ b/node_modules/unique-string/index.js @@ -0,0 +1,4 @@ +'use strict'; +const cryptoRandomString = require('crypto-random-string'); + +module.exports = () => cryptoRandomString(32); diff --git a/node_modules/unique-string/license b/node_modules/unique-string/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/unique-string/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the 
Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/unique-string/package.json b/node_modules/unique-string/package.json new file mode 100644 index 00000000..b12954a9 --- /dev/null +++ b/node_modules/unique-string/package.json @@ -0,0 +1,44 @@ +{ + "name": "unique-string", + "version": "1.0.0", + "description": "Generate a unique random string", + "license": "MIT", + "repository": "sindresorhus/unique-string", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "unique", + "string", + "random", + "uniq", + "str", + "rand", + "text", + "id", + "identifier", + "slug", + "hex" + ], + "dependencies": { + "crypto-random-string": "^1.0.0" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + }, + "xo": { + "esnext": true + } +} diff --git a/node_modules/unique-string/readme.md b/node_modules/unique-string/readme.md new file mode 100644 index 00000000..5d5ac971 --- /dev/null +++ b/node_modules/unique-string/readme.md @@ -0,0 +1,32 @@ +# unique-string [![Build Status](https://travis-ci.org/sindresorhus/unique-string.svg?branch=master)](https://travis-ci.org/sindresorhus/unique-string) + +> Generate a unique random string + + +## Install + +``` +$ npm install --save unique-string +``` + + +## Usage + +```js +const uniqueString = require('unique-string'); + +uniqueString(); +//=> 'b4de2a49c8ffa3fbee04446f045483b2' +``` + + +## API + +### uniqueString() + +Returns a 32 character unique string. Matches the length of MD5, which is [unique enough](http://stackoverflow.com/a/2444336/64949) for non-crypto purposes. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/unpipe/HISTORY.md b/node_modules/unpipe/HISTORY.md new file mode 100644 index 00000000..85e0f8d7 --- /dev/null +++ b/node_modules/unpipe/HISTORY.md @@ -0,0 +1,4 @@ +1.0.0 / 2015-06-14 +================== + + * Initial release diff --git a/node_modules/unpipe/LICENSE b/node_modules/unpipe/LICENSE new file mode 100644 index 00000000..aed01382 --- /dev/null +++ b/node_modules/unpipe/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/unpipe/README.md b/node_modules/unpipe/README.md new file mode 100644 index 00000000..e536ad2c --- /dev/null +++ b/node_modules/unpipe/README.md @@ -0,0 +1,43 @@ +# unpipe + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Unpipe a stream from all destinations. + +## Installation + +```sh +$ npm install unpipe +``` + +## API + +```js +var unpipe = require('unpipe') +``` + +### unpipe(stream) + +Unpipes all destinations from a given stream. With stream 2+, this is +equivalent to `stream.unpipe()`. When used with streams 1 style streams +(typically Node.js 0.8 and below), this module attempts to undo the +actions done in `stream.pipe(dest)`. + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/unpipe.svg +[npm-url]: https://npmjs.org/package/unpipe +[node-image]: https://img.shields.io/node/v/unpipe.svg +[node-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/stream-utils/unpipe.svg +[travis-url]: https://travis-ci.org/stream-utils/unpipe +[coveralls-image]: https://img.shields.io/coveralls/stream-utils/unpipe.svg +[coveralls-url]: https://coveralls.io/r/stream-utils/unpipe?branch=master +[downloads-image]: https://img.shields.io/npm/dm/unpipe.svg +[downloads-url]: https://npmjs.org/package/unpipe diff --git a/node_modules/unpipe/index.js b/node_modules/unpipe/index.js new file mode 100644 index 00000000..15c3d97a --- /dev/null +++ b/node_modules/unpipe/index.js @@ -0,0 +1,69 @@ +/*! + * unpipe + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = unpipe + +/** + * Determine if there are Node.js pipe-like data listeners. + * @private + */ + +function hasPipeDataListeners(stream) { + var listeners = stream.listeners('data') + + for (var i = 0; i < listeners.length; i++) { + if (listeners[i].name === 'ondata') { + return true + } + } + + return false +} + +/** + * Unpipe a stream from all destinations. 
+ * + * @param {object} stream + * @public + */ + +function unpipe(stream) { + if (!stream) { + throw new TypeError('argument stream is required') + } + + if (typeof stream.unpipe === 'function') { + // new-style + stream.unpipe() + return + } + + // Node.js 0.8 hack + if (!hasPipeDataListeners(stream)) { + return + } + + var listener + var listeners = stream.listeners('close') + + for (var i = 0; i < listeners.length; i++) { + listener = listeners[i] + + if (listener.name !== 'cleanup' && listener.name !== 'onclose') { + continue + } + + // invoke the listener + listener.call(stream) + } +} diff --git a/node_modules/unpipe/package.json b/node_modules/unpipe/package.json new file mode 100644 index 00000000..a2b73583 --- /dev/null +++ b/node_modules/unpipe/package.json @@ -0,0 +1,27 @@ +{ + "name": "unpipe", + "description": "Unpipe a stream from all destinations", + "version": "1.0.0", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "repository": "stream-utils/unpipe", + "devDependencies": { + "istanbul": "0.3.15", + "mocha": "2.2.5", + "readable-stream": "1.1.13" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/unset-value/LICENSE b/node_modules/unset-value/LICENSE new file mode 100644 index 00000000..ec85897e --- /dev/null +++ b/node_modules/unset-value/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015, 2017, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
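As a quick orientation for the unpipe module vendored a few hunks above, here is a small usage sketch. It is a hedged illustration only: the file names are hypothetical and not taken from the package, and the comments restate what the vendored readme and index.js describe.

```js
const fs = require('fs');
const unpipe = require('unpipe');

const source = fs.createReadStream('input.log');   // hypothetical paths, for illustration only
const copyA = fs.createWriteStream('copy-a.log');
const copyB = fs.createWriteStream('copy-b.log');

source.pipe(copyA);
source.pipe(copyB);

// Later (for example on a timeout or error) stop feeding both destinations at once.
unpipe(source); // on streams2+ readables this amounts to source.unpipe()
```

On modern readables the vendored index.js simply delegates to `stream.unpipe()`; the close-listener walk is only a fallback for old streams1-style streams.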
diff --git a/node_modules/unset-value/README.md b/node_modules/unset-value/README.md new file mode 100644 index 00000000..f0fec3d3 --- /dev/null +++ b/node_modules/unset-value/README.md @@ -0,0 +1,131 @@ +# unset-value [![NPM version](https://img.shields.io/npm/v/unset-value.svg?style=flat)](https://www.npmjs.com/package/unset-value) [![NPM monthly downloads](https://img.shields.io/npm/dm/unset-value.svg?style=flat)](https://npmjs.org/package/unset-value) [![NPM total downloads](https://img.shields.io/npm/dt/unset-value.svg?style=flat)](https://npmjs.org/package/unset-value) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/unset-value.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/unset-value) + +> Delete nested properties from an object using dot notation. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save unset-value +``` + +## Usage + +```js +var unset = require('unset-value'); + +var obj = {a: {b: {c: 'd', e: 'f'}}}; +unset(obj, 'a.b.c'); +console.log(obj); +//=> {a: {b: {e: 'f'}}}; +``` + +## Examples + +### Updates the object when a property is deleted + +```js +var obj = {a: 'b'}; +unset(obj, 'a'); +console.log(obj); +//=> {} +``` + +### Returns true when a property is deleted + +```js +unset({a: 'b'}, 'a') // true +``` + +### Returns `true` when a property does not exist + +This is consistent with `delete` behavior in that it does not +throw when a property does not exist. + +```js +unset({a: {b: {c: 'd'}}}, 'd') // true +``` + +### delete nested values + +```js +var one = {a: {b: {c: 'd'}}}; +unset(one, 'a.b'); +console.log(one); +//=> {a: {}} + +var two = {a: {b: {c: 'd'}}}; +unset(two, 'a.b.c'); +console.log(two); +//=> {a: {b: {}}} + +var three = {a: {b: {c: 'd', e: 'f'}}}; +unset(three, 'a.b.c'); +console.log(three); +//=> {a: {b: {e: 'f'}}} +``` + +### throws on invalid args + +```js +unset(); +// 'expected an object.' +``` + +## About + +### Related projects + +* [get-value](https://www.npmjs.com/package/get-value): Use property paths (`a.b.c`) to get a nested value from an object. | [homepage](https://github.com/jonschlinkert/get-value "Use property paths (`a.b.c`) to get a nested value from an object.") +* [get-values](https://www.npmjs.com/package/get-values): Return an array of all values from the given object. | [homepage](https://github.com/jonschlinkert/get-values "Return an array of all values from the given object.") +* [omit-value](https://www.npmjs.com/package/omit-value): Omit properties from an object or deeply nested property of an object using object path… [more](https://github.com/jonschlinkert/omit-value) | [homepage](https://github.com/jonschlinkert/omit-value "Omit properties from an object or deeply nested property of an object using object path notation.") +* [put-value](https://www.npmjs.com/package/put-value): Update only existing values from an object, works with dot notation paths like `a.b.c` and… [more](https://github.com/tunnckocore/put-value#readme) | [homepage](https://github.com/tunnckocore/put-value#readme "Update only existing values from an object, works with dot notation paths like `a.b.c` and support deep nesting.") +* [set-value](https://www.npmjs.com/package/set-value): Create nested values and any intermediaries using dot notation (`'a.b.c'`) paths. 
| [homepage](https://github.com/jonschlinkert/set-value "Create nested values and any intermediaries using dot notation (`'a.b.c'`) paths.") +* [union-value](https://www.npmjs.com/package/union-value): Set an array of unique values as the property of an object. Supports setting deeply… [more](https://github.com/jonschlinkert/union-value) | [homepage](https://github.com/jonschlinkert/union-value "Set an array of unique values as the property of an object. Supports setting deeply nested properties using using object-paths/dot notation.") +* [upsert-value](https://www.npmjs.com/package/upsert-value): Update or set nested values and any intermediaries with dot notation (`'a.b.c'`) paths. | [homepage](https://github.com/doowb/upsert-value "Update or set nested values and any intermediaries with dot notation (`'a.b.c'`) paths.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 6 | [jonschlinkert](https://github.com/jonschlinkert) | +| 2 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +### Running tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.4.2, on February 25, 2017._ \ No newline at end of file diff --git a/node_modules/unset-value/index.js b/node_modules/unset-value/index.js new file mode 100644 index 00000000..a9190101 --- /dev/null +++ b/node_modules/unset-value/index.js @@ -0,0 +1,32 @@ +/*! + * unset-value + * + * Copyright (c) 2015, 2017, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +var isObject = require('isobject'); +var has = require('has-value'); + +module.exports = function unset(obj, prop) { + if (!isObject(obj)) { + throw new TypeError('expected an object.'); + } + if (obj.hasOwnProperty(prop)) { + delete obj[prop]; + return true; + } + + if (has(obj, prop)) { + var segs = prop.split('.'); + var last = segs.pop(); + while (segs.length && segs[segs.length - 1].slice(-1) === '\\') { + last = segs.pop().slice(0, -1) + '.' + last; + } + while (segs.length) obj = obj[prop = segs.shift()]; + return (delete obj[last]); + } + return true; +}; diff --git a/node_modules/unset-value/node_modules/has-value/LICENSE b/node_modules/unset-value/node_modules/has-value/LICENSE new file mode 100644 index 00000000..39245ac1 --- /dev/null +++ b/node_modules/unset-value/node_modules/has-value/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/unset-value/node_modules/has-value/README.md b/node_modules/unset-value/node_modules/has-value/README.md new file mode 100644 index 00000000..a927633e --- /dev/null +++ b/node_modules/unset-value/node_modules/has-value/README.md @@ -0,0 +1,130 @@ +# has-value [![NPM version](https://img.shields.io/npm/v/has-value.svg?style=flat)](https://www.npmjs.com/package/has-value) [![NPM downloads](https://img.shields.io/npm/dm/has-value.svg?style=flat)](https://npmjs.org/package/has-value) [![Build Status](https://img.shields.io/travis/jonschlinkert/has-value.svg?style=flat)](https://travis-ci.org/jonschlinkert/has-value) + +> Returns true if a value exists, false if empty. Works with deeply nested values using object paths. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install has-value --save +``` + +**Works for:** + +* booleans +* functions +* numbers (pass `true` as the last arg to treat zero as a value instead of falsey) +* strings +* nulls +* object +* arrays + +## Usage + +Works with nested object paths or a single value: + +```js +var hasValue = require('has-value'); + +hasValue({a: {b: {c: 'foo'}}} 'a.b.c'); +//=> true + +hasValue('a'); +//=> true + +hasValue(''); +//=> false + +hasValue(1); +//=> true + +hasValue(0); +//=> false + +hasValue(0, true); // pass `true` as the last arg to treat zero as a value +//=> true + +hasValue({a: 'a'}}); +//=> true + +hasValue({}}); +//=> false + +hasValue(['a']); +//=> true + +hasValue([]); +//=> false + +hasValue(function(foo) {}); // function length/arity +//=> true + +hasValue(function() {}); +//=> false + +hasValue(true); +hasValue(false); +//=> true +``` + +## isEmpty + +To do the opposite and test for empty values, do: + +```js +function isEmpty(o, isZero) { + return !hasValue.apply(hasValue, arguments); +} +``` + +## Related projects + +You might also be interested in these projects: + +* [get-object](https://www.npmjs.com/package/get-object): Get a property from an object using dot (object path) notation. | [homepage](https://github.com/jonschlinkert/get-object) +* [get-property](https://www.npmjs.com/package/get-property): Get a nested property or its value from an object using simple `a.b.c` paths. | [homepage](https://github.com/jonschlinkert/get-property) +* [get-value](https://www.npmjs.com/package/get-value): Use property paths (`a.b.c`) to get a nested value from an object. 
| [homepage](https://github.com/jonschlinkert/get-value) +* [set-value](https://www.npmjs.com/package/set-value): Create nested values and any intermediaries using dot notation (`'a.b.c'`) paths. | [homepage](https://github.com/jonschlinkert/set-value) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/has-value/issues/new). + +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/has-value/blob/master/LICENSE). + +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v, on March 27, 2016._ \ No newline at end of file diff --git a/node_modules/unset-value/node_modules/has-value/index.js b/node_modules/unset-value/node_modules/has-value/index.js new file mode 100644 index 00000000..90687c87 --- /dev/null +++ b/node_modules/unset-value/node_modules/has-value/index.js @@ -0,0 +1,19 @@ +/*! + * has-value + * + * Copyright (c) 2014-2016, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var isObject = require('isobject'); +var hasValues = require('has-values'); +var get = require('get-value'); + +module.exports = function(obj, prop, noZero) { + if (isObject(obj)) { + return hasValues(get(obj, prop), noZero); + } + return hasValues(obj, prop); +}; diff --git a/node_modules/unset-value/node_modules/has-value/node_modules/isobject/LICENSE b/node_modules/unset-value/node_modules/has-value/node_modules/isobject/LICENSE new file mode 100644 index 00000000..39245ac1 --- /dev/null +++ b/node_modules/unset-value/node_modules/has-value/node_modules/isobject/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
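Before the nested isobject copy that follows, a compact restatement of the checks the has-value 0.3.x readme above documents. This is a hedged sketch, not part of the package: the readme's own snippet has a missing comma and a couple of stray braces, and the zero case is left out here because it depends on the `noZero` flag.

```js
var hasValue = require('has-value'); // the nested 0.3.x copy vendored above

hasValue({a: {b: {c: 'foo'}}}, 'a.b.c'); //=> true   (dotted path resolves to a non-empty string)
hasValue({a: {b: {c: ''}}}, 'a.b.c');    //=> false  (resolves to an empty string)
hasValue('a');                           //=> true
hasValue('');                            //=> false
hasValue({a: 'a'});                      //=> true
hasValue({});                            //=> false
hasValue(['a']);                         //=> true
hasValue([]);                            //=> false
hasValue(function (foo) {});             //=> true   (functions are checked by arity)
hasValue(function () {});                //=> false
hasValue(true);                          //=> true   (booleans always count as values)
hasValue(null);                          //=> false
```

Under the hood the wrapper shown above resolves the dotted path with get-value and then lets has-values decide whether the resolved value counts as non-empty.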
diff --git a/node_modules/unset-value/node_modules/has-value/node_modules/isobject/README.md b/node_modules/unset-value/node_modules/has-value/node_modules/isobject/README.md new file mode 100644 index 00000000..9dd897aa --- /dev/null +++ b/node_modules/unset-value/node_modules/has-value/node_modules/isobject/README.md @@ -0,0 +1,112 @@ +# isobject [![NPM version](https://img.shields.io/npm/v/isobject.svg?style=flat)](https://www.npmjs.com/package/isobject) [![NPM downloads](https://img.shields.io/npm/dm/isobject.svg?style=flat)](https://npmjs.org/package/isobject) [![Build Status](https://img.shields.io/travis/jonschlinkert/isobject.svg?style=flat)](https://travis-ci.org/jonschlinkert/isobject) + +Returns true if the value is an object and not an array or null. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install isobject --save +``` + +Use [is-plain-object](https://github.com/jonschlinkert/is-plain-object) if you want only objects that are created by the `Object` constructor. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install isobject +``` + +Install with [bower](http://bower.io/) + +```sh +$ bower install isobject +``` + +## Usage + +```js +var isObject = require('isobject'); +``` + +**True** + +All of the following return `true`: + +```js +isObject({}); +isObject(Object.create({})); +isObject(Object.create(Object.prototype)); +isObject(Object.create(null)); +isObject({}); +isObject(new Foo); +isObject(/foo/); +``` + +**False** + +All of the following return `false`: + +```js +isObject(); +isObject(function () {}); +isObject(1); +isObject([]); +isObject(undefined); +isObject(null); +``` + +## Related projects + +You might also be interested in these projects: + +[merge-deep](https://www.npmjs.com/package/merge-deep): Recursively merge values in a javascript object. | [homepage](https://github.com/jonschlinkert/merge-deep) + +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow) +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object) +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/isobject/issues/new). + +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/isobject/blob/master/LICENSE). 
+ +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v0.9.0, on April 25, 2016._ \ No newline at end of file diff --git a/node_modules/unset-value/node_modules/has-value/node_modules/isobject/index.js b/node_modules/unset-value/node_modules/has-value/node_modules/isobject/index.js new file mode 100644 index 00000000..aa0dce0b --- /dev/null +++ b/node_modules/unset-value/node_modules/has-value/node_modules/isobject/index.js @@ -0,0 +1,14 @@ +/*! + * isobject + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var isArray = require('isarray'); + +module.exports = function isObject(val) { + return val != null && typeof val === 'object' && isArray(val) === false; +}; diff --git a/node_modules/unset-value/node_modules/has-value/node_modules/isobject/package.json b/node_modules/unset-value/node_modules/has-value/node_modules/isobject/package.json new file mode 100644 index 00000000..954f4113 --- /dev/null +++ b/node_modules/unset-value/node_modules/has-value/node_modules/isobject/package.json @@ -0,0 +1,67 @@ +{ + "name": "isobject", + "description": "Returns true if the value is an object and not an array or null.", + "version": "2.1.0", + "homepage": "https://github.com/jonschlinkert/isobject", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/isobject", + "bugs": { + "url": "https://github.com/jonschlinkert/isobject/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "isarray": "1.0.0" + }, + "devDependencies": { + "gulp-format-md": "^0.1.9", + "mocha": "^2.4.5" + }, + "keywords": [ + "check", + "is", + "is-object", + "isobject", + "kind", + "kind-of", + "kindof", + "native", + "object", + "type", + "typeof", + "value" + ], + "verb": { + "related": { + "list": [ + "merge-deep", + "extend-shallow", + "is-plain-object", + "kind-of" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "reflinks": [ + "verb" + ] + } +} diff --git a/node_modules/unset-value/node_modules/has-value/package.json b/node_modules/unset-value/node_modules/has-value/package.json new file mode 100644 index 00000000..88ff6edc --- /dev/null +++ b/node_modules/unset-value/node_modules/has-value/package.json @@ -0,0 +1,81 @@ +{ + "name": "has-value", + "version": "0.3.1", + "description": "Returns true if a value exists, false if empty. 
Works with deeply nested values using object paths.", + "homepage": "https://github.com/jonschlinkert/has-value", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/has-value", + "bugs": { + "url": "https://github.com/jonschlinkert/has-value/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "get-value": "^2.0.3", + "has-values": "^0.1.4", + "isobject": "^2.0.0" + }, + "devDependencies": { + "gulp-format-md": "^0.1.7", + "mocha": "^2.4.5" + }, + "keywords": [ + "array", + "boolean", + "empty", + "find", + "function", + "has", + "hasOwn", + "javascript", + "js", + "key", + "keys", + "node.js", + "null", + "number", + "object", + "properties", + "property", + "string", + "type", + "util", + "utilities", + "utility", + "value" + ], + "verb": { + "run": true, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "get-object", + "get-property", + "get-value", + "set-value" + ] + }, + "reflinks": [ + "verb" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/unset-value/node_modules/has-values/LICENSE b/node_modules/unset-value/node_modules/has-values/LICENSE new file mode 100644 index 00000000..39245ac1 --- /dev/null +++ b/node_modules/unset-value/node_modules/has-values/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/unset-value/node_modules/has-values/README.md b/node_modules/unset-value/node_modules/has-values/README.md new file mode 100644 index 00000000..13319c51 --- /dev/null +++ b/node_modules/unset-value/node_modules/has-values/README.md @@ -0,0 +1,114 @@ +# has-values [![NPM version](https://img.shields.io/npm/v/has-values.svg?style=flat)](https://www.npmjs.com/package/has-values) [![NPM downloads](https://img.shields.io/npm/dm/has-values.svg?style=flat)](https://npmjs.org/package/has-values) [![Build Status](https://img.shields.io/travis/jonschlinkert/has-values.svg?style=flat)](https://travis-ci.org/jonschlinkert/has-values) + +> Returns true if any values exist, false if empty. Works for booleans, functions, numbers, strings, nulls, objects and arrays. 
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install has-values --save +``` + +## Usage + +```js +var hasValue = require('has-values'); + +hasValue('a'); +//=> true + +hasValue(''); +//=> false + +hasValue(1); +//=> true + +hasValue(0); +//=> false + +hasValue(0, true); // treat zero as a value +//=> true + +hasValue({a: 'a'}}); +//=> true + +hasValue({}}); +//=> false + +hasValue(['a']); +//=> true + +hasValue([]); +//=> false + +hasValue(function(foo) {}); // function length/arity +//=> true + +hasValue(function() {}); +//=> false + +hasValue(true); +hasValue(false); +//=> true +``` + +## isEmpty + +To test for empty values, do: + +```js +function isEmpty(o, isZero) { + return !hasValue(o, isZero); +} +``` + +## Related projects + +You might also be interested in these projects: + +* [has-value](https://www.npmjs.com/package/has-value): Returns true if a value exists, false if empty. Works with deeply nested values using… [more](https://www.npmjs.com/package/has-value) | [homepage](https://github.com/jonschlinkert/has-value) +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object) +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/has-values/issues/new). + +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/has-values/blob/master/LICENSE). + +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v, on March 27, 2016._ \ No newline at end of file diff --git a/node_modules/unset-value/node_modules/has-values/index.js b/node_modules/unset-value/node_modules/has-values/index.js new file mode 100644 index 00000000..6d04ba1f --- /dev/null +++ b/node_modules/unset-value/node_modules/has-values/index.js @@ -0,0 +1,36 @@ +/*! + * has-values + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. 
+ */ + +'use strict'; + +module.exports = function hasValue(o, noZero) { + if (o === null || o === undefined) { + return false; + } + + if (typeof o === 'boolean') { + return true; + } + + if (typeof o === 'number') { + if (o === 0 && noZero === true) { + return false; + } + return true; + } + + if (o.length !== undefined) { + return o.length !== 0; + } + + for (var key in o) { + if (o.hasOwnProperty(key)) { + return true; + } + } + return false; +}; diff --git a/node_modules/unset-value/node_modules/has-values/package.json b/node_modules/unset-value/node_modules/has-values/package.json new file mode 100644 index 00000000..519a2c89 --- /dev/null +++ b/node_modules/unset-value/node_modules/has-values/package.json @@ -0,0 +1,75 @@ +{ + "name": "has-values", + "version": "0.1.4", + "description": "Returns true if any values exist, false if empty. Works for booleans, functions, numbers, strings, nulls, objects and arrays. ", + "homepage": "https://github.com/jonschlinkert/has-values", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/has-values", + "bugs": { + "url": "https://github.com/jonschlinkert/has-values/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^0.1.7", + "mocha": "^2.4.5" + }, + "keywords": [ + "array", + "boolean", + "empty", + "find", + "function", + "has", + "hasOwn", + "javascript", + "js", + "key", + "keys", + "node.js", + "null", + "number", + "object", + "properties", + "property", + "string", + "type", + "util", + "utilities", + "utility", + "value" + ], + "verb": { + "run": true, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "has-value", + "isobject", + "is-plain-object" + ] + }, + "reflinks": [ + "verb" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/unset-value/package.json b/node_modules/unset-value/package.json new file mode 100644 index 00000000..0753ba6d --- /dev/null +++ b/node_modules/unset-value/package.json @@ -0,0 +1,71 @@ +{ + "name": "unset-value", + "description": "Delete nested properties from an object using dot notation.", + "version": "1.0.0", + "homepage": "https://github.com/jonschlinkert/unset-value", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + " (https://github.com/wtgtybhertgeghgtwtg)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "jonschlinkert/unset-value", + "bugs": { + "url": "https://github.com/jonschlinkert/unset-value/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "has-value": "^0.3.1", + "isobject": "^3.0.0" + }, + "devDependencies": { + "gulp-format-md": "^0.1.11", + "mocha": "*", + "should": "*" + }, + "keywords": [ + "del", + "delete", + "key", + "object", + "omit", + "prop", + "property", + "remove", + "unset", + "value" + ], + "verb": { + "related": { + "list": [ + "get-value", + "get-values", + "omit-value", + "put-value", + "set-value", + "union-value", + "upsert-value" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/unzip-response/index.js 
b/node_modules/unzip-response/index.js new file mode 100644 index 00000000..e97b9937 --- /dev/null +++ b/node_modules/unzip-response/index.js @@ -0,0 +1,36 @@ +'use strict'; +const PassThrough = require('stream').PassThrough; +const zlib = require('zlib'); + +module.exports = res => { + // TODO: use Array#includes when targeting Node.js 6 + if (['gzip', 'deflate'].indexOf(res.headers['content-encoding']) === -1) { + return res; + } + + const unzip = zlib.createUnzip(); + const stream = new PassThrough(); + + stream.httpVersion = res.httpVersion; + stream.headers = res.headers; + stream.rawHeaders = res.rawHeaders; + stream.trailers = res.trailers; + stream.rawTrailers = res.rawTrailers; + stream.setTimeout = res.setTimeout.bind(res); + stream.statusCode = res.statusCode; + stream.statusMessage = res.statusMessage; + stream.socket = res.socket; + + unzip.on('error', err => { + if (err.code === 'Z_BUF_ERROR') { + stream.end(); + return; + } + + stream.emit('error', err); + }); + + res.pipe(unzip).pipe(stream); + + return stream; +}; diff --git a/node_modules/unzip-response/license b/node_modules/unzip-response/license new file mode 100644 index 00000000..32a16ce3 --- /dev/null +++ b/node_modules/unzip-response/license @@ -0,0 +1,21 @@ +`The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
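As a rough sketch of the idea the unzip-response index.js above implements (a restatement for illustration, not the package's own code; the URL is a placeholder): pick a decompressor based on the `Content-Encoding` header and pipe the raw response through it, passing uncompressed responses straight through.

```js
const http = require('http');
const zlib = require('zlib');

http.get('http://example.com', res => {            // placeholder URL
  const encoding = res.headers['content-encoding'];
  const body = ['gzip', 'deflate'].indexOf(encoding) === -1
    ? res                                           // not compressed: use the response as-is
    : res.pipe(zlib.createUnzip());                 // gzip/deflate: decompress on the fly

  let data = '';
  body.setEncoding('utf8');
  body.on('data', chunk => { data += chunk; });
  body.on('end', () => console.log('decoded body length:', data.length));
});
```

The vendored module goes further than this sketch: it mirrors `statusCode`, `headers` and the other `IncomingMessage` properties onto a `PassThrough` so callers can keep treating the result as an HTTP response, and it swallows `Z_BUF_ERROR` so an empty compressed body simply ends the stream.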
diff --git a/node_modules/unzip-response/package.json b/node_modules/unzip-response/package.json new file mode 100644 index 00000000..bc14cf27 --- /dev/null +++ b/node_modules/unzip-response/package.json @@ -0,0 +1,49 @@ +{ + "name": "unzip-response", + "version": "2.0.1", + "description": "Unzip a HTTP response if needed", + "license": "MIT", + "repository": "sindresorhus/unzip-response", + "maintainers": [ + { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + { + "name": "Vsevolod Strukchinsky", + "email": "floatdrop@gmail.com", + "url": "github.com/floatdrop" + } + ], + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "http", + "unzip", + "zlib", + "gzip", + "deflate", + "incoming", + "message", + "response", + "stream" + ], + "devDependencies": { + "ava": "*", + "get-stream": "^2.3.0", + "pify": "^2.3.0", + "rfpify": "^1.0.0", + "xo": "*" + }, + "xo": { + "esnext": true + } +} diff --git a/node_modules/unzip-response/readme.md b/node_modules/unzip-response/readme.md new file mode 100644 index 00000000..87a62c4b --- /dev/null +++ b/node_modules/unzip-response/readme.md @@ -0,0 +1,29 @@ +# unzip-response [![Build Status](https://travis-ci.org/sindresorhus/unzip-response.svg?branch=master)](https://travis-ci.org/sindresorhus/unzip-response) + +> Unzip a HTTP response if needed + +Unzips the response from [`http.request`](https://nodejs.org/api/http.html#http_http_request_options_callback) if it's gzipped/deflated, otherwise just passes it through. + + +## Install + +``` +$ npm install --save unzip-response +``` + + +## Usage + +```js +const http = require('http'); +const unzipResponse = require('unzip-response'); + +http.get('http://sindresorhus.com', res => { + res = unzipResponse(res); +}); +``` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/upath/LICENSE b/node_modules/upath/LICENSE new file mode 100755 index 00000000..1ce5a29b --- /dev/null +++ b/node_modules/upath/LICENSE @@ -0,0 +1,22 @@ +Copyright(c) 2014-2017 Angelos Pikoulas (agelos.pikoulas@gmail.com) + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
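The next hunks vendor upath's compiled source and readme. As a hedged sketch of the core trick they describe (my own restatement, not the package's code): convert `\` to `/`, consolidate duplicate separators, and run both the arguments and the results of the built-in `path` functions through that conversion.

```js
const path = require('path');

// Sketch only: replace Windows separators and collapse duplicate slashes.
function toUnix(p) {
  return p.replace(/\\/g, '/').replace(/\/{2,}/g, '/');
}

// Sketch only: a join that always yields unix-style results, whatever the host OS.
function unixJoin(...parts) {
  return toUnix(path.join(...parts.map(toUnix)));
}

console.log(toUnix('..\\windows\\..\\unix//mixed'));      //=> '../windows/../unix/mixed'
console.log(unixJoin('some\\windows\\only', '..\\path'));  //=> 'some/windows/path'
```

The real module wraps every function exported by `path` this way and then layers the extension helpers (`addExt`, `trimExt`, `removeExt`, `changeExt`, `defaultExt`) and the `./`-preserving `normalizeSafe`/`joinSafe` variants on top, as the readme below documents.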
diff --git a/node_modules/upath/build/code/upath.js b/node_modules/upath/build/code/upath.js new file mode 100644 index 00000000..3d77ecda --- /dev/null +++ b/node_modules/upath/build/code/upath.js @@ -0,0 +1,171 @@ +/** +* upath http://github.com/anodynos/upath/ +* +* A proxy to `path`, replacing `\` with `/` for all results & new methods to normalize & join keeping leading `./` and add, change, default, trim file extensions. +* Version 1.1.1 - Compiled on 2019-03-07 10:45:51 +* Repository git://github.com/anodynos/upath +* Copyright(c) 2019 Angelos Pikoulas +* License MIT +*/ + +// Generated by uRequire v0.7.0-beta.33 target: 'lib' template: 'nodejs' + + +var VERSION = '1.1.1'; // injected by urequire-rc-inject-version + +var extraFn, extraFunctions, isFunction, isString, isValidExt, name, path, propName, propValue, toUnix, upath, slice = [].slice, indexOf = [].indexOf || function (item) { + for (var i = 0, l = this.length; i < l; i++) { + if (i in this && this[i] === item) + return i; + } + return -1; + }, hasProp = {}.hasOwnProperty; +path = require("path"); +isFunction = function (val) { + return val instanceof Function; +}; +isString = function (val) { + return typeof val === "string" || !!val && typeof val === "object" && Object.prototype.toString.call(val) === "[object String]"; +}; +upath = exports; +upath.VERSION = typeof VERSION !== "undefined" && VERSION !== null ? VERSION : "NO-VERSION"; +toUnix = function (p) { + var double; + p = p.replace(/\\/g, "/"); + double = /\/\//; + while (p.match(double)) { + p = p.replace(double, "/"); + } + return p; +}; +for (propName in path) { + propValue = path[propName]; + if (isFunction(propValue)) { + upath[propName] = function (propName) { + return function () { + var args, result; + args = 1 <= arguments.length ? slice.call(arguments, 0) : []; + args = args.map(function (p) { + if (isString(p)) { + return toUnix(p); + } else { + return p; + } + }); + result = path[propName].apply(path, args); + if (isString(result)) { + return toUnix(result); + } else { + return result; + } + }; + }(propName); + } else { + upath[propName] = propValue; + } +} +upath.sep = "/"; +extraFunctions = { + toUnix: toUnix, + normalizeSafe: function (p) { + p = toUnix(p); + if (p.startsWith("./")) { + if (p.startsWith("./..") || p === "./") { + return upath.normalize(p); + } else { + return "./" + upath.normalize(p); + } + } else { + return upath.normalize(p); + } + }, + normalizeTrim: function (p) { + p = upath.normalizeSafe(p); + if (p.endsWith("/")) { + return p.slice(0, +(p.length - 2) + 1 || 9000000000); + } else { + return p; + } + }, + joinSafe: function () { + var p, result; + p = 1 <= arguments.length ? slice.call(arguments, 0) : []; + result = upath.join.apply(null, p); + if (p[0].startsWith("./") && !result.startsWith("./")) { + result = "./" + result; + } + return result; + }, + addExt: function (file, ext) { + if (!ext) { + return file; + } else { + if (ext[0] !== ".") { + ext = "." + ext; + } + return file + (file.endsWith(ext) ? "" : ext); + } + }, + trimExt: function (filename, ignoreExts, maxSize) { + var oldExt; + if (maxSize == null) { + maxSize = 7; + } + oldExt = upath.extname(filename); + if (isValidExt(oldExt, ignoreExts, maxSize)) { + return filename.slice(0, +(filename.length - oldExt.length - 1) + 1 || 9000000000); + } else { + return filename; + } + }, + removeExt: function (filename, ext) { + if (!ext) { + return filename; + } else { + ext = ext[0] === "." ? ext : "." 
+ ext; + if (upath.extname(filename) === ext) { + return upath.trimExt(filename); + } else { + return filename; + } + } + }, + changeExt: function (filename, ext, ignoreExts, maxSize) { + if (maxSize == null) { + maxSize = 7; + } + return upath.trimExt(filename, ignoreExts, maxSize) + (!ext ? "" : ext[0] === "." ? ext : "." + ext); + }, + defaultExt: function (filename, ext, ignoreExts, maxSize) { + var oldExt; + if (maxSize == null) { + maxSize = 7; + } + oldExt = upath.extname(filename); + if (isValidExt(oldExt, ignoreExts, maxSize)) { + return filename; + } else { + return upath.addExt(filename, ext); + } + } +}; +isValidExt = function (ext, ignoreExts, maxSize) { + if (ignoreExts == null) { + ignoreExts = []; + } + return ext && ext.length <= maxSize && indexOf.call(ignoreExts.map(function (e) { + return (e && e[0] !== "." ? "." : "") + e; + }), ext) < 0; +}; +for (name in extraFunctions) { + if (!hasProp.call(extraFunctions, name)) + continue; + extraFn = extraFunctions[name]; + if (upath[name] !== void 0) { + throw new Error("path." + name + " already exists."); + } else { + upath[name] = extraFn; + } +} + +; \ No newline at end of file diff --git a/node_modules/upath/package.json b/node_modules/upath/package.json new file mode 100644 index 00000000..0f896ea7 --- /dev/null +++ b/node_modules/upath/package.json @@ -0,0 +1,60 @@ +{ + "name": "upath", + "description": "A proxy to `path`, replacing `\\` with `/` for all results & new methods to normalize & join keeping leading `./` and add, change, default, trim file extensions.", + "version": "1.1.2", + "homepage": "http://github.com/anodynos/upath/", + "author": { + "name": "Angelos Pikoulas", + "email": "agelos.pikoulas@gmail.com" + }, + "license": "MIT", + "keywords": [ + "path", + "unix", + "windows", + "extension", + "file extension", + "replace extension", + "change extension", + "trim extension", + "add extension", + "default extension" + ], + "repository": { + "type": "git", + "url": "git://github.com/anodynos/upath" + }, + "bugs": { + "url": "http://github.com/anodynos/upath/issues", + "email": "agelos.pikoulas@gmail.com" + }, + "main": "./build/code/upath.js", + "types": "./upath.d.ts", + "preferGlobal": false, + "scripts": { + "test": "grunt", + "build": "grunt lib" + }, + "directories": { + "doc": "./doc", + "dist": "./build" + }, + "engines": { + "node": ">=4", + "yarn": "*" + }, + "devDependencies": { + "chai": "~4.0.2", + "coffee-script": "1.12.6", + "grunt": "0.4.5", + "grunt-contrib-watch": "~1.0.0", + "grunt-urequire": "0.7.x", + "lodash": "^4.17.4", + "mocha": "~3.4.2", + "uberscore": "0.0.19", + "underscore.string": "~3.3.4", + "urequire": "0.7.0-beta.33", + "urequire-ab-specrunner": "^0.2.5", + "urequire-rc-inject-version": "^0.1.6" + } +} diff --git a/node_modules/upath/readme.md b/node_modules/upath/readme.md new file mode 100644 index 00000000..31ebb4ad --- /dev/null +++ b/node_modules/upath/readme.md @@ -0,0 +1,335 @@ +# upath v1.1.2 + +[![Build Status](https://travis-ci.org/anodynos/upath.svg?branch=master)](https://travis-ci.org/anodynos/upath) +[![Up to date Status](https://david-dm.org/anodynos/upath.png)](https://david-dm.org/anodynos/upath) + +A drop-in replacement / proxy to nodejs's `path` that: + + * Replaces the windows `\` with the unix `/` in all string params & results. This has significant positives - see below. + + * Adds **filename extensions** functions `addExt`, `trimExt`, `removeExt`, `changeExt`, and `defaultExt`. 
+ + * Add a `normalizeSafe` function to preserve any meaningful leading `./` & a `normalizeTrim` which additionally trims any useless ending `/`. + + * Plus a helper `toUnix` that simply converts `\` to `/` and consolidates duplicates. + +**Useful note: these docs are actually auto generated from [specs](https://github.com/anodynos/upath/blob/master/source/spec/upath-spec.coffee), running on Linux.** + +Notes: + + * `upath.sep` is set to `'/'` for seamless replacement (as of 1.0.3). + + * upath has no runtime dependencies, except built-in `path` (as of 1.0.4) + + * travis-ci tested in node versions 4 to 10 + +## Why ? + +Normal `path` doesn't convert paths to a unified format (ie `/`) before calculating paths (`normalize`, `join`), which can lead to numerous problems. +Also path joining, normalization etc on the two formats is not consistent, depending on where it runs. Running `path` on Windows yields different results than when it runs on Linux / Mac. + +In general, if you code your paths logic while developing on Unix/Mac and it runs on Windows, you may run into problems when using `path`. + +Note that using **Unix `/` on Windows** works perfectly inside nodejs (and other languages), so there's no reason to stick to the Windows legacy at all. + +##### Examples / specs + + +Check out the different (improved) behavior to vanilla `path`: + + `upath.normalize(path)` --returns--> + + ✓ `'c:/windows/nodejs/path'` ---> `'c:/windows/nodejs/path'` // equal to `path.normalize()` + ✓ `'c:/windows/../nodejs/path'` ---> `'c:/nodejs/path'` // equal to `path.normalize()` + ✓ `'c:\\windows\\nodejs\\path'` ---> `'c:/windows/nodejs/path'` // `path.normalize()` gives `'c:\windows\nodejs\path'` + ✓ `'c:\\windows\\..\\nodejs\\path'` ---> `'c:/nodejs/path'` // `path.normalize()` gives `'c:\windows\..\nodejs\path'` + ✓ `'//windows\\unix/mixed'` ---> `'/windows/unix/mixed'` // `path.normalize()` gives `'/windows\unix/mixed'` + ✓ `'\\windows//unix/mixed'` ---> `'/windows/unix/mixed'` // `path.normalize()` gives `'\windows/unix/mixed'` + ✓ `'////\\windows\\..\\unix/mixed/'` ---> `'/unix/mixed/'` // `path.normalize()` gives `'/\windows\..\unix/mixed/'` + + +Joining paths can also be a problem: + + `upath.join(paths...)` --returns--> + + ✓ `'some/nodejs/deep', '../path'` ---> `'some/nodejs/path'` // equal to `path.join()` + ✓ `'some/nodejs\\windows', '../path'` ---> `'some/nodejs/path'` // `path.join()` gives `'some/path'` + ✓ `'some\\windows\\only', '..\\path'` ---> `'some/windows/path'` // `path.join()` gives `'some\windows\only/..\path'` + + +Parsing with `path.parse()` should also be consistent across OSes: + + `upath.parse(path)` --returns--> + + ✓ `'c:\Windows\Directory\somefile.ext'` ---> `{ root: '', dir: 'c:/Windows/Directory', base: 'somefile.ext', ext: '.ext', name: 'somefile' }` + // `path.parse()` gives `'{ root: '', dir: '', base: 'c:\\Windows\\Directory\\somefile.ext', ext: '.ext', name: 'c:\\Windows\\Directory\\somefile' }'` + ✓ `'/root/of/unix/somefile.ext'` ---> `{ root: '/', dir: '/root/of/unix', base: 'somefile.ext', ext: '.ext', name: 'somefile' }` // equal to `path.parse()` + + +## Added functions + + +#### `upath.toUnix(path)` + +Just converts all `` to `/` and consolidates duplicates, without performing any normalization. 
+ +##### Examples / specs + + `upath.toUnix(path)` --returns--> + + ✓ `'.//windows\//unix//mixed////'` ---> `'./windows/unix/mixed/'` + ✓ `'..///windows\..\\unix/mixed'` ---> `'../windows/../unix/mixed'` + + +#### `upath.normalizeSafe(path)` + +Exactly like `path.normalize(path)`, but it keeps the first meaningful `./`. + +Note that the unix `/` is returned everywhere, so windows `\` is always converted to unix `/`. + +##### Examples / specs & how it differs from vanilla `path` + + `upath.normalizeSafe(path)` --returns--> + + ✓ `''` ---> `'.'` // equal to `path.normalize()` + ✓ `'.'` ---> `'.'` // equal to `path.normalize()` + ✓ `'./'` ---> `'./'` // equal to `path.normalize()` + ✓ `'.//'` ---> `'./'` // equal to `path.normalize()` + ✓ `'.\\'` ---> `'./'` // `path.normalize()` gives `'.\'` + ✓ `'.\\//'` ---> `'./'` // `path.normalize()` gives `'.\/'` + ✓ `'./..'` ---> `'..'` // equal to `path.normalize()` + ✓ `'.//..'` ---> `'..'` // equal to `path.normalize()` + ✓ `'./../'` ---> `'../'` // equal to `path.normalize()` + ✓ `'.\\..\\'` ---> `'../'` // `path.normalize()` gives `'.\..\'` + ✓ `'./../dep'` ---> `'../dep'` // equal to `path.normalize()` + ✓ `'../dep'` ---> `'../dep'` // equal to `path.normalize()` + ✓ `'../path/dep'` ---> `'../path/dep'` // equal to `path.normalize()` + ✓ `'../path/../dep'` ---> `'../dep'` // equal to `path.normalize()` + ✓ `'dep'` ---> `'dep'` // equal to `path.normalize()` + ✓ `'path//dep'` ---> `'path/dep'` // equal to `path.normalize()` + ✓ `'./dep'` ---> `'./dep'` // `path.normalize()` gives `'dep'` + ✓ `'./path/dep'` ---> `'./path/dep'` // `path.normalize()` gives `'path/dep'` + ✓ `'./path/../dep'` ---> `'./dep'` // `path.normalize()` gives `'dep'` + ✓ `'.//windows\\unix/mixed/'` ---> `'./windows/unix/mixed/'` // `path.normalize()` gives `'windows\unix/mixed/'` + ✓ `'..//windows\\unix/mixed'` ---> `'../windows/unix/mixed'` // `path.normalize()` gives `'../windows\unix/mixed'` + ✓ `'windows\\unix/mixed/'` ---> `'windows/unix/mixed/'` // `path.normalize()` gives `'windows\unix/mixed/'` + ✓ `'..//windows\\..\\unix/mixed'` ---> `'../unix/mixed'` // `path.normalize()` gives `'../windows\..\unix/mixed'` + + +#### `upath.normalizeTrim(path)` + +Exactly like `path.normalizeSafe(path)`, but it trims any useless ending `/`. + +##### Examples / specs + + `upath.normalizeTrim(path)` --returns--> + + ✓ `'./'` ---> `'.'` // `upath.normalizeSafe()` gives `'./'` + ✓ `'./../'` ---> `'..'` // `upath.normalizeSafe()` gives `'../'` + ✓ `'./../dep/'` ---> `'../dep'` // `upath.normalizeSafe()` gives `'../dep/'` + ✓ `'path//dep\\'` ---> `'path/dep'` // `upath.normalizeSafe()` gives `'path/dep/'` + ✓ `'.//windows\\unix/mixed/'` ---> `'./windows/unix/mixed'` // `upath.normalizeSafe()` gives `'./windows/unix/mixed/'` + + +#### `upath.joinSafe([path1][, path2][, ...])` + +Exactly like `path.join()`, but it keeps the first meaningful `./`. + +Note that the unix `/` is returned everywhere, so windows `\` is always converted to unix `/`. 
+ +##### Examples / specs & how it differs from vanilla `path` + + `upath.joinSafe(path)` --returns--> + + ✓ `'some/nodejs/deep', '../path'` ---> `'some/nodejs/path'` // equal to `path.join()` + ✓ `'./some/local/unix/', '../path'` ---> `'./some/local/path'` // `path.join()` gives `'some/local/path'` + ✓ `'./some\\current\\mixed', '..\\path'` ---> `'./some/current/path'` // `path.join()` gives `'some\current\mixed/..\path'` + ✓ `'../some/relative/destination', '..\\path'` ---> `'../some/relative/path'` // `path.join()` gives `'../some/relative/destination/..\path'` + + +## Added functions for *filename extension* manipulation. + +**Happy notes:** + + In all functions you can: + + * use both `.ext` & `ext` - the dot `.` on the extension is always adjusted correctly. + + * omit the `ext` param (pass null/undefined/empty string) and the common sense thing will happen. + + * ignore specific extensions from being considered as valid ones (eg `.min`, `.dev` `.aLongExtIsNotAnExt` etc), hence no trimming or replacement takes place on them. + + + +#### `upath.addExt(filename, [ext])` + +Adds `.ext` to `filename`, but only if it doesn't already have the exact extension. + +##### Examples / specs + + `upath.addExt(filename, 'js')` --returns--> + + ✓ `'myfile/addExt'` ---> `'myfile/addExt.js'` + ✓ `'myfile/addExt.txt'` ---> `'myfile/addExt.txt.js'` + ✓ `'myfile/addExt.js'` ---> `'myfile/addExt.js'` + ✓ `'myfile/addExt.min.'` ---> `'myfile/addExt.min..js'` + + +It adds nothing if no `ext` param is passed. + + `upath.addExt(filename)` --returns--> + + ✓ `'myfile/addExt'` ---> `'myfile/addExt'` + ✓ `'myfile/addExt.txt'` ---> `'myfile/addExt.txt'` + ✓ `'myfile/addExt.js'` ---> `'myfile/addExt.js'` + ✓ `'myfile/addExt.min.'` ---> `'myfile/addExt.min.'` + + +#### `upath.trimExt(filename, [ignoreExts], [maxSize=7])` + +Trims a filename's extension. + + * Extensions are considered to be up to `maxSize` chars long, counting the dot (defaults to 7). + + * An `Array` of `ignoreExts` (eg `['.min']`) prevents these from being considered as extension, thus are not trimmed. + +##### Examples / specs + + `upath.trimExt(filename)` --returns--> + + ✓ `'my/trimedExt.txt'` ---> `'my/trimedExt'` + ✓ `'my/trimedExt'` ---> `'my/trimedExt'` + ✓ `'my/trimedExt.min'` ---> `'my/trimedExt'` + ✓ `'my/trimedExt.min.js'` ---> `'my/trimedExt.min'` + ✓ `'../my/trimedExt.longExt'` ---> `'../my/trimedExt.longExt'` + + +It is ignoring `.min` & `.dev` as extensions, and considers exts with up to 8 chars. + + `upath.removeExt(filename, ['min', '.dev'], 8)` --returns--> + + ✓ `'my/trimedExt.txt'` ---> `'my/trimedExt'` + ✓ `'my/trimedExt.min'` ---> `'my/trimedExt.min'` + ✓ `'my/trimedExt.dev'` ---> `'my/trimedExt.dev'` + ✓ `'../my/trimedExt.longExt'` ---> `'../my/trimedExt'` + ✓ `'../my/trimedExt.longRExt'` ---> `'../my/trimedExt.longRExt'` + + +#### `upath.removeExt(filename, ext)` + +Removes the specific `ext` extension from filename, if it has it. Otherwise it leaves it as is. +As in all upath functions, it be `.ext` or `ext`. + +##### Examples / specs + + `upath.removeExt(filename, '.js')` --returns--> + + ✓ `'removedExt.js'` ---> `'removedExt'` + ✓ `'removedExt.txt.js'` ---> `'removedExt.txt'` + ✓ `'notRemoved.txt'` ---> `'notRemoved.txt'` + + +#### `upath.changeExt(filename, [ext], [ignoreExts], [maxSize=7])` + +Changes a filename's extension to `ext`. If it has no (valid) extension, it adds it. + + * Valid extensions are considered to be up to `maxSize` chars long, counting the dot (defaults to 7). 
+ + * An `Array` of `ignoreExts` (eg `['.min']`) prevents these from being considered as extension, thus are not changed - the new extension is added instead. + +##### Examples / specs + + `upath.changeExt(filename, '.js')` --returns--> + + ✓ `'my/module.min'` ---> `'my/module.js'` + ✓ `'my/module.coffee'` ---> `'my/module.js'` + ✓ `'my/module'` ---> `'my/module.js'` + ✓ `'file/withDot.'` ---> `'file/withDot.js'` + ✓ `'file/change.longExt'` ---> `'file/change.longExt.js'` + + +If no `ext` param is given, it trims the current extension (if any). + + `upath.changeExt(filename)` --returns--> + + ✓ `'my/module.min'` ---> `'my/module'` + ✓ `'my/module.coffee'` ---> `'my/module'` + ✓ `'my/module'` ---> `'my/module'` + ✓ `'file/withDot.'` ---> `'file/withDot'` + ✓ `'file/change.longExt'` ---> `'file/change.longExt'` + + +It is ignoring `.min` & `.dev` as extensions, and considers exts with up to 8 chars. + + `upath.changeExt(filename, 'js', ['min', '.dev'], 8)` --returns--> + + ✓ `'my/module.coffee'` ---> `'my/module.js'` + ✓ `'file/notValidExt.min'` ---> `'file/notValidExt.min.js'` + ✓ `'file/notValidExt.dev'` ---> `'file/notValidExt.dev.js'` + ✓ `'file/change.longExt'` ---> `'file/change.js'` + ✓ `'file/change.longRExt'` ---> `'file/change.longRExt.js'` + + +#### `upath.defaultExt(filename, [ext], [ignoreExts], [maxSize=7])` + +Adds `.ext` to `filename`, only if it doesn't already have _any_ *old* extension. + + * (Old) extensions are considered to be up to `maxSize` chars long, counting the dot (defaults to 7). + + * An `Array` of `ignoreExts` (eg `['.min']`) will force adding default `.ext` even if one of these is present. + +##### Examples / specs + + `upath.defaultExt(filename, 'js')` --returns--> + + ✓ `'fileWith/defaultExt'` ---> `'fileWith/defaultExt.js'` + ✓ `'fileWith/defaultExt.js'` ---> `'fileWith/defaultExt.js'` + ✓ `'fileWith/defaultExt.min'` ---> `'fileWith/defaultExt.min'` + ✓ `'fileWith/defaultExt.longExt'` ---> `'fileWith/defaultExt.longExt.js'` + + +If no `ext` param is passed, it leaves filename intact. + + `upath.defaultExt(filename)` --returns--> + + ✓ `'fileWith/defaultExt'` ---> `'fileWith/defaultExt'` + ✓ `'fileWith/defaultExt.js'` ---> `'fileWith/defaultExt.js'` + ✓ `'fileWith/defaultExt.min'` ---> `'fileWith/defaultExt.min'` + ✓ `'fileWith/defaultExt.longExt'` ---> `'fileWith/defaultExt.longExt'` + + +It is ignoring `.min` & `.dev` as extensions, and considers exts with up to 8 chars. + + `upath.defaultExt(filename, 'js', ['min', '.dev'], 8)` --returns--> + + ✓ `'fileWith/defaultExt'` ---> `'fileWith/defaultExt.js'` + ✓ `'fileWith/defaultExt.min'` ---> `'fileWith/defaultExt.min.js'` + ✓ `'fileWith/defaultExt.dev'` ---> `'fileWith/defaultExt.dev.js'` + ✓ `'fileWith/defaultExt.longExt'` ---> `'fileWith/defaultExt.longExt'` + ✓ `'fileWith/defaultExt.longRext'` ---> `'fileWith/defaultExt.longRext.js'` + + +Copyright(c) 2014-2017 Angelos Pikoulas (agelos.pikoulas@gmail.com) + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +97 passing (33ms) diff --git a/node_modules/upath/upath.d.ts b/node_modules/upath/upath.d.ts new file mode 100644 index 00000000..778d70a8 --- /dev/null +++ b/node_modules/upath/upath.d.ts @@ -0,0 +1,239 @@ +declare module "upath" { + + /** + * A parsed path object generated by path.parse() or consumed by path.format(). + */ + export interface ParsedPath { + /** + * The root of the path such as '/' or 'c:\' + */ + root: string; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir: string; + /** + * The file name including extension (if any) such as 'index.html' + */ + base: string; + /** + * The file extension (if any) such as '.html' + */ + ext: string; + /** + * The file name without extension (if any) such as 'index' + */ + name: string; + } + + /** + * Version of the library + */ + export var VERSION: string; + + /** + * Just converts all `to/` and consolidates duplicates, without performing any normalization. + * + * @param p string path to convert to unix. + */ + export function toUnix(p: string): string; + + /** + * Exactly like path.normalize(path), but it keeps the first meaningful ./. + * + * Note that the unix / is returned everywhere, so windows \ is always converted to unix /. + * + * @param p string path to normalize. + */ + export function normalizeSafe(p: string): string; + + /** + * Exactly like path.normalizeSafe(path), but it trims any useless ending /. + * + * @param p string path to normalize + */ + export function normalizeTrim(p: string): string; + + /** + * Exactly like path.join(), but it keeps the first meaningful ./. + * + * Note that the unix / is returned everywhere, so windows \ is always converted to unix /. + * + * @param paths string paths to join + */ + export function joinSafe(...p: any[]): string; + + /** + * Adds .ext to filename, but only if it doesn't already have the exact extension. + * + * @param file string filename to add extension to + * @param ext string extension to add + */ + export function addExt(file: string, ext: string): string; + + /** + * Trims a filename's extension. + * + * Extensions are considered to be up to maxSize chars long, counting the dot (defaults to 7). + * + * An Array of ignoreExts (eg ['.min']) prevents these from being considered as extension, thus are not trimmed. + * + * @param filename string filename to trim it's extension + * @param ignoreExts array extensions to ignore + * @param maxSize number max length of the extension + */ + export function trimExt(filename: string, ignoreExts?: string[], maxSize?: number): string; + + /** + * Removes the specific ext extension from filename, if it has it. Otherwise it leaves it as is. As in all upath functions, it be .ext or ext. + * + * @param file string filename to remove extension to + * @param ext string extension to remove + */ + export function removeExt(filename: string, ext: string): string; + + /** + * Changes a filename's extension to ext. If it has no (valid) extension, it adds it. 
+ * + * Valid extensions are considered to be up to maxSize chars long, counting the dot (defaults to 7). + * + * An Array of ignoreExts (eg ['.min']) prevents these from being considered as extension, thus are not changed - the new extension is added instead. + * + * @param filename string filename to change it's extension + * @param ext string extension to change to + * @param ignoreExts array extensions to ignore + * @param maxSize number max length of the extension + */ + export function changeExt(filename: string, ext: string, ignoreExts?: string[], maxSize?: number): string; + + /** + * Adds .ext to filename, only if it doesn't already have any old extension. + * + * (Old) extensions are considered to be up to maxSize chars long, counting the dot (defaults to 7). + * + * An Array of ignoreExts (eg ['.min']) will force adding default .ext even if one of these is present. + * + * @param filename string filename to default to it's extension + * @param ext string extension to default to + * @param ignoreExts array extensions to ignore + * @param maxSize number max length of the extension + */ + export function defaultExt(filename: string, ext: string, ignoreExts?: string[], maxSize?: number): string; + + /** + * Normalize a string path, reducing '..' and '.' parts. + * When multiple slashes are found, they're replaced by a single one; when the path contains a trailing slash, it is preserved. On Windows backslashes are used. + * + * @param p string path to normalize. + */ + export function normalize(p: string): string; + /** + * Join all arguments together and normalize the resulting path. + * Arguments must be strings. In v0.8, non-string arguments were silently ignored. In v0.10 and up, an exception is thrown. + * + * @param paths string paths to join. + */ + export function join(...paths: any[]): string; + /** + * Join all arguments together and normalize the resulting path. + * Arguments must be strings. In v0.8, non-string arguments were silently ignored. In v0.10 and up, an exception is thrown. + * + * @param paths string paths to join. + */ + export function join(...paths: string[]): string; + /** + * The right-most parameter is considered {to}. Other parameters are considered an array of {from}. + * + * Starting from leftmost {from} parameter, resolves {to} to an absolute path. + * + * If {to} isn't already absolute, {from} arguments are prepended in right to left order, until an absolute path is found. If after using all {from} paths still no absolute path is found, the current working directory is used as well. The resulting path is normalized, and trailing slashes are removed unless the path gets resolved to the root directory. + * + * @param pathSegments string paths to join. Non-string arguments are ignored. + */ + export function resolve(...pathSegments: any[]): string; + /** + * Determines whether {path} is an absolute path. An absolute path will always resolve to the same location, regardless of the working directory. + * + * @param path path to test. + */ + export function isAbsolute(path: string): boolean; + /** + * Solve the relative path from {from} to {to}. + * At times we have two absolute paths, and we need to derive the relative path from one to the other. This is actually the reverse transform of path.resolve. + * + * @param from + * @param to + */ + export function relative(from: string, to: string): string; + /** + * Return the directory name of a path. Similar to the Unix dirname command. + * + * @param p the path to evaluate. 
+ */ + export function dirname(p: string): string; + /** + * Return the last portion of a path. Similar to the Unix basename command. + * Often used to extract the file name from a fully qualified path. + * + * @param p the path to evaluate. + * @param ext optionally, an extension to remove from the result. + */ + export function basename(p: string, ext?: string): string; + /** + * Return the extension of the path, from the last '.' to end of string in the last portion of the path. + * If there is no '.' in the last portion of the path or the first character of it is '.', then it returns an empty string + * + * @param p the path to evaluate. + */ + export function extname(p: string): string; + /** + * The platform-specific file separator. '\\' or '/'. + */ + export var sep: string; + /** + * The platform-specific file delimiter. ';' or ':'. + */ + export var delimiter: string; + /** + * Returns an object from a path string - the opposite of format(). + * + * @param pathString path to evaluate. + */ + export function parse(pathString: string): ParsedPath; + /** + * Returns a path string from an object - the opposite of parse(). + * + * @param pathString path to evaluate. + */ + export function format(pathObject: ParsedPath): string; + + export module posix { + export function normalize(p: string): string; + export function join(...paths: any[]): string; + export function resolve(...pathSegments: any[]): string; + export function isAbsolute(p: string): boolean; + export function relative(from: string, to: string): string; + export function dirname(p: string): string; + export function basename(p: string, ext?: string): string; + export function extname(p: string): string; + export var sep: string; + export var delimiter: string; + export function parse(p: string): ParsedPath; + export function format(pP: ParsedPath): string; + } + + export module win32 { + export function normalize(p: string): string; + export function join(...paths: any[]): string; + export function resolve(...pathSegments: any[]): string; + export function isAbsolute(p: string): boolean; + export function relative(from: string, to: string): string; + export function dirname(p: string): string; + export function basename(p: string, ext?: string): string; + export function extname(p: string): string; + export var sep: string; + export var delimiter: string; + export function parse(p: string): ParsedPath; + export function format(pP: ParsedPath): string; + } +} diff --git a/node_modules/update-notifier/check.js b/node_modules/update-notifier/check.js new file mode 100644 index 00000000..521f84af --- /dev/null +++ b/node_modules/update-notifier/check.js @@ -0,0 +1,22 @@ +/* eslint-disable unicorn/no-process-exit */ +'use strict'; +let updateNotifier = require('.'); + +const options = JSON.parse(process.argv[2]); + +updateNotifier = new updateNotifier.UpdateNotifier(options); + +updateNotifier.checkNpm().then(update => { + // Only update the last update check time on success + updateNotifier.config.set('lastUpdateCheck', Date.now()); + + if (update.type && update.type !== 'latest') { + updateNotifier.config.set('update', update); + } + + // Call process exit explicitly to terminate the child process + // Otherwise the child process will run forever, according to the Node.js docs + process.exit(); +}).catch(() => { + process.exit(1); +}); diff --git a/node_modules/update-notifier/index.js b/node_modules/update-notifier/index.js new file mode 100644 index 00000000..38ff01e2 --- /dev/null +++ 
b/node_modules/update-notifier/index.js @@ -0,0 +1,155 @@ +'use strict'; +const spawn = require('child_process').spawn; +const path = require('path'); +const format = require('util').format; +const importLazy = require('import-lazy')(require); + +const configstore = importLazy('configstore'); +const chalk = importLazy('chalk'); +const semverDiff = importLazy('semver-diff'); +const latestVersion = importLazy('latest-version'); +const isNpm = importLazy('is-npm'); +const isInstalledGlobally = importLazy('is-installed-globally'); +const boxen = importLazy('boxen'); +const xdgBasedir = importLazy('xdg-basedir'); +const isCi = importLazy('is-ci'); +const ONE_DAY = 1000 * 60 * 60 * 24; + +class UpdateNotifier { + constructor(options) { + options = options || {}; + this.options = options; + options.pkg = options.pkg || {}; + + // Reduce pkg to the essential keys. with fallback to deprecated options + // TODO: Remove deprecated options at some point far into the future + options.pkg = { + name: options.pkg.name || options.packageName, + version: options.pkg.version || options.packageVersion + }; + + if (!options.pkg.name || !options.pkg.version) { + throw new Error('pkg.name and pkg.version required'); + } + + this.packageName = options.pkg.name; + this.packageVersion = options.pkg.version; + this.updateCheckInterval = typeof options.updateCheckInterval === 'number' ? options.updateCheckInterval : ONE_DAY; + this.hasCallback = typeof options.callback === 'function'; + this.callback = options.callback || (() => {}); + this.disabled = 'NO_UPDATE_NOTIFIER' in process.env || + process.argv.indexOf('--no-update-notifier') !== -1 || + isCi(); + this.shouldNotifyInNpmScript = options.shouldNotifyInNpmScript; + + if (!this.disabled && !this.hasCallback) { + try { + const ConfigStore = configstore(); + this.config = new ConfigStore(`update-notifier-${this.packageName}`, { + optOut: false, + // Init with the current time so the first check is only + // after the set interval, so not to bother users right away + lastUpdateCheck: Date.now() + }); + } catch (err) { + // Expecting error code EACCES or EPERM + const msg = + chalk().yellow(format(' %s update check failed ', options.pkg.name)) + + format('\n Try running with %s or get access ', chalk().cyan('sudo')) + + '\n to the local update config store via \n' + + chalk().cyan(format(' sudo chown -R $USER:$(id -gn $USER) %s ', xdgBasedir().config)); + + process.on('exit', () => { + console.error('\n' + boxen()(msg, {align: 'center'})); + }); + } + } + } + check() { + if (this.hasCallback) { + this.checkNpm() + .then(update => this.callback(null, update)) + .catch(err => this.callback(err)); + return; + } + + if ( + !this.config || + this.config.get('optOut') || + this.disabled + ) { + return; + } + + this.update = this.config.get('update'); + + if (this.update) { + this.config.delete('update'); + } + + // Only check for updates on a set interval + if (Date.now() - this.config.get('lastUpdateCheck') < this.updateCheckInterval) { + return; + } + + // Spawn a detached process, passing the options as an environment property + spawn(process.execPath, [path.join(__dirname, 'check.js'), JSON.stringify(this.options)], { + detached: true, + stdio: 'ignore' + }).unref(); + } + checkNpm() { + return latestVersion()(this.packageName).then(latestVersion => { + return { + latest: latestVersion, + current: this.packageVersion, + type: semverDiff()(this.packageVersion, latestVersion) || 'latest', + name: this.packageName + }; + }); + } + notify(opts) { + const suppressForNpm 
= !this.shouldNotifyInNpmScript && isNpm(); + if (!process.stdout.isTTY || suppressForNpm || !this.update) { + return this; + } + + opts = Object.assign({isGlobal: isInstalledGlobally()}, opts); + + opts.message = opts.message || 'Update available ' + chalk().dim(this.update.current) + chalk().reset(' → ') + + chalk().green(this.update.latest) + ' \nRun ' + chalk().cyan('npm i ' + (opts.isGlobal ? '-g ' : '') + this.packageName) + ' to update'; + + opts.boxenOpts = opts.boxenOpts || { + padding: 1, + margin: 1, + align: 'center', + borderColor: 'yellow', + borderStyle: 'round' + }; + + const message = '\n' + boxen()(opts.message, opts.boxenOpts); + + if (opts.defer === false) { + console.error(message); + } else { + process.on('exit', () => { + console.error(message); + }); + + process.on('SIGINT', () => { + console.error(''); + process.exit(); + }); + } + + return this; + } +} + +module.exports = options => { + const updateNotifier = new UpdateNotifier(options); + updateNotifier.check(); + return updateNotifier; +}; + +module.exports.UpdateNotifier = UpdateNotifier; diff --git a/node_modules/update-notifier/license b/node_modules/update-notifier/license new file mode 100644 index 00000000..cea5a355 --- /dev/null +++ b/node_modules/update-notifier/license @@ -0,0 +1,9 @@ +Copyright Google + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
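For orientation, here is a rough, hypothetical consumer sketch of the same flow that `check.js` drives above: constructing the `UpdateNotifier` class directly and querying npm once via `checkNpm()`. The `./package.json` path and the log message are illustrative only and not part of the package.

```js
// Hypothetical consumer code, not shipped with update-notifier.
const {UpdateNotifier} = require('update-notifier');
const pkg = require('./package.json'); // assumed to contain name + version

const notifier = new UpdateNotifier({pkg});

// checkNpm() queries the npm registry and resolves with the same shape the
// background child process persists: {latest, current, type, name}.
notifier.checkNpm().then(update => {
  if (update.type !== 'latest') {
    console.log(`A ${update.type} update is available: ${update.current} → ${update.latest}`);
  }
});
```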
diff --git a/node_modules/update-notifier/node_modules/.bin/is-ci b/node_modules/update-notifier/node_modules/.bin/is-ci new file mode 120000 index 00000000..00628fcb --- /dev/null +++ b/node_modules/update-notifier/node_modules/.bin/is-ci @@ -0,0 +1 @@ +../../../is-ci/bin.js \ No newline at end of file diff --git a/node_modules/update-notifier/package.json b/node_modules/update-notifier/package.json new file mode 100644 index 00000000..dc820a36 --- /dev/null +++ b/node_modules/update-notifier/package.json @@ -0,0 +1,55 @@ +{ + "name": "update-notifier", + "version": "2.5.0", + "description": "Update notifications for your CLI app", + "license": "BSD-2-Clause", + "repository": "yeoman/update-notifier", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava --timeout=20s" + }, + "files": [ + "index.js", + "check.js" + ], + "keywords": [ + "npm", + "update", + "updater", + "notify", + "notifier", + "check", + "checker", + "cli", + "module", + "package", + "version" + ], + "dependencies": { + "boxen": "^1.2.1", + "chalk": "^2.0.1", + "configstore": "^3.0.0", + "import-lazy": "^2.1.0", + "is-ci": "^1.0.10", + "is-installed-globally": "^0.1.0", + "is-npm": "^1.0.0", + "latest-version": "^3.0.0", + "semver-diff": "^2.0.0", + "xdg-basedir": "^3.0.0" + }, + "devDependencies": { + "ava": "*", + "clear-module": "^2.1.0", + "fixture-stdout": "^0.2.1", + "mock-require": "^2.0.2", + "strip-ansi": "^4.0.0", + "xo": "^0.18.2" + } +} diff --git a/node_modules/update-notifier/readme.md b/node_modules/update-notifier/readme.md new file mode 100644 index 00000000..ef6a1a8b --- /dev/null +++ b/node_modules/update-notifier/readme.md @@ -0,0 +1,193 @@ +# update-notifier [![Build Status](https://travis-ci.org/yeoman/update-notifier.svg?branch=master)](https://travis-ci.org/yeoman/update-notifier) + +> Update notifications for your CLI app + +![](screenshot.png) + +Inform users of your package of updates in a non-intrusive way. + +#### Contents + +- [Install](#install) +- [Usage](#usage) +- [How](#how) +- [API](#api) +- [About](#about) +- [Users](#users) + + +## Install + +``` +$ npm install update-notifier +``` + + +## Usage + +### Simple + +```js +const updateNotifier = require('update-notifier'); +const pkg = require('./package.json'); + +updateNotifier({pkg}).notify(); +``` + +### Comprehensive + +```js +const updateNotifier = require('update-notifier'); +const pkg = require('./package.json'); + +// Checks for available update and returns an instance +const notifier = updateNotifier({pkg}); + +// Notify using the built-in convenience method +notifier.notify(); + +// `notifier.update` contains some useful info about the update +console.log(notifier.update); +/* +{ + latest: '1.0.1', + current: '1.0.0', + type: 'patch', // Possible values: latest, major, minor, patch, prerelease, build + name: 'pageres' +} +*/ +``` + +### Options and custom message + +```js +const notifier = updateNotifier({ + pkg, + updateCheckInterval: 1000 * 60 * 60 * 24 * 7 // 1 week +}); + +if (notifier.update) { + console.log(`Update available: ${notifier.update.latest}`); +} +``` + + +## How + +Whenever you initiate the update notifier and it's not within the interval threshold, it will asynchronously check with npm in the background for available updates, then persist the result. The next time the notifier is initiated, the result will be loaded into the `.update` property. 
This prevents any impact on your package startup performance. +The update check is done in a unref'ed [child process](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options). This means that if you call `process.exit`, the check will still be performed in its own process. + +The first time the user runs your app, it will check for an update, and even if an update is available, it will wait the specified `updateCheckInterval` before notifying the user. This is done to not be annoying to the user, but might surprise you as an implementer if you're testing whether it works. Check out [`example.js`](example.js) to quickly test out `update-notifier` and see how you can test that it works in your app. + + +## API + +### notifier = updateNotifier(options) + +Checks if there is an available update. Accepts options defined below. Returns an instance with an `.update` property if there is an available update, otherwise `undefined`. + +### options + +#### pkg + +Type: `Object` + +##### name + +*Required*<br>
+Type: `string` + +##### version + +*Required*<br>
+Type: `string` + +#### updateCheckInterval + +Type: `number`<br>
+Default: `1000 * 60 * 60 * 24` *(1 day)* + +How often to check for updates. + +#### callback(error, update) + +Type: `Function` + +Passing a callback here will make it check for an update directly and report right away. Not recommended as you won't get the benefits explained in [`How`](#how). `update` is equal to `notifier.update`. + +### notifier.notify([options]) + +Convenience method to display a notification message. *(See screenshot)* + +Only notifies if there is an update and the process is [TTY](https://nodejs.org/api/process.html#process_tty_terminals_and_process_stdout). + +#### options + +Type: `Object` + +##### defer + +Type: `boolean`<br>
+Default: `true` + +Defer showing the notification to after the process has exited. + +##### message + +Type: `string`<br>
+Default: [See above screenshot](https://github.com/yeoman/update-notifier#update-notifier-) + +Message that will be shown when an update is available. + +##### isGlobal + +Type: `boolean`<br>
+Default: `true` + +Include the `-g` argument in the default message's `npm i` recommendation. You may want to change this if your CLI package can be installed as a dependency of another project, and don't want to recommend a global installation. This option is ignored if you supply your own `message` (see above). + +##### boxenOpts + +Type: `Object`<br>
+Default: `{padding: 1, margin: 1, align: 'center', borderColor: 'yellow', borderStyle: 'round'}` *(See screenshot)* + +Options object that will be passed to [`boxen`](https://github.com/sindresorhus/boxen). + +##### shouldNotifyInNpmScript + +Type: `boolean`<br>
+Default: `false` + +Allows notification to be shown when running as an npm script. + +### User settings + +Users of your module have the ability to opt-out of the update notifier by changing the `optOut` property to `true` in `~/.config/configstore/update-notifier-[your-module-name].json`. The path is available in `notifier.config.path`. + +Users can also opt-out by [setting the environment variable](https://github.com/sindresorhus/guides/blob/master/set-environment-variables.md) `NO_UPDATE_NOTIFIER` with any value or by using the `--no-update-notifier` flag on a per run basis. + +The check is also skipped on CI automatically. + + +## About + +The idea for this module came from the desire to apply the browser update strategy to CLI tools, where everyone is always on the latest version. We first tried automatic updating, which we discovered wasn't popular. This is the second iteration of that idea, but limited to just update notifications. + + +## Users + +There are a bunch projects using it: + +- [npm](https://github.com/npm/npm) - Package manager for JavaScript +- [Yeoman](http://yeoman.io) - Modern workflows for modern webapps +- [AVA](https://ava.li) - Simple concurrent test runner +- [XO](https://github.com/xojs/xo) - JavaScript happiness style linter +- [Pageres](https://github.com/sindresorhus/pageres) - Capture website screenshots +- [Node GH](http://nodegh.io) - GitHub command line tool + +[And 1600+ more…](https://www.npmjs.org/browse/depended/update-notifier) + + +## License + +BSD-2-Clause © Google diff --git a/node_modules/urix/.jshintrc b/node_modules/urix/.jshintrc new file mode 100644 index 00000000..9d1a6183 --- /dev/null +++ b/node_modules/urix/.jshintrc @@ -0,0 +1,42 @@ +{ + "bitwise": true, + "camelcase": true, + "curly": false, + "eqeqeq": true, + "es3": false, + "forin": true, + "immed": false, + "indent": false, + "latedef": "nofunc", + "newcap": false, + "noarg": true, + "noempty": true, + "nonew": false, + "plusplus": false, + "quotmark": true, + "undef": true, + "unused": "vars", + "strict": false, + "trailing": true, + "maxparams": 5, + "maxdepth": false, + "maxstatements": false, + "maxcomplexity": false, + "maxlen": 100, + + "asi": true, + "expr": true, + "globalstrict": true, + "smarttabs": true, + "sub": true, + + "node": true, + "globals": { + "describe": false, + "it": false, + "before": false, + "beforeEach": false, + "after": false, + "afterEach": false + } +} diff --git a/node_modules/urix/LICENSE b/node_modules/urix/LICENSE new file mode 100644 index 00000000..0595be36 --- /dev/null +++ b/node_modules/urix/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013 Simon Lydell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/urix/index.js b/node_modules/urix/index.js new file mode 100644 index 00000000..dc6ef270 --- /dev/null +++ b/node_modules/urix/index.js @@ -0,0 +1,17 @@ +// Copyright 2014 Simon Lydell +// X11 (“MIT”) Licensed. (See LICENSE.) + +var path = require("path") + +"use strict" + +function urix(aPath) { + if (path.sep === "\\") { + return aPath + .replace(/\\/g, "/") + .replace(/^[a-z]:\/?/i, "/") + } + return aPath +} + +module.exports = urix diff --git a/node_modules/urix/package.json b/node_modules/urix/package.json new file mode 100644 index 00000000..992e3295 --- /dev/null +++ b/node_modules/urix/package.json @@ -0,0 +1,25 @@ +{ + "name": "urix", + "version": "0.1.0", + "author": "Simon Lydell", + "license": "MIT", + "description": "Makes Windows-style paths more unix and URI friendly.", + "main": "index.js", + "repository": "lydell/urix", + "keywords": [ + "path", + "url", + "uri", + "unix", + "windows", + "backslash", + "slash" + ], + "scripts": { + "test": "jshint index.js test/ && mocha" + }, + "devDependencies": { + "mocha": "^1.17.1", + "jshint": "^2.4.4" + } +} diff --git a/node_modules/urix/readme.md b/node_modules/urix/readme.md new file mode 100644 index 00000000..b258b986 --- /dev/null +++ b/node_modules/urix/readme.md @@ -0,0 +1,46 @@ +[![Build Status](https://travis-ci.org/lydell/urix.png?branch=master)](https://travis-ci.org/lydell/urix) + +Overview +======== + +Makes Windows-style paths more unix and URI friendly. Useful if you work with +paths that eventually will be used in URLs. + +```js +var urix = require("urix") + +// On Windows: +urix("c:\\users\\you\\foo") +// /users/you/foo + +// On unix-like systems: +urix("c:\\users\\you\\foo") +// c:\users\you\foo +``` + + +Installation +============ + +`npm install urix` + +```js +var urix = require("urix") +``` + + +Usage +===== + +### `urix(path)` ### + +On Windows, replaces all backslashes with slashes and uses a slash instead of a +drive letter and a colon for absolute paths. + +On unix-like systems it is a no-op. + + +License +======= + +[The X11 (“MIT”) License](LICENSE). diff --git a/node_modules/urix/test/index.js b/node_modules/urix/test/index.js new file mode 100644 index 00000000..5333f246 --- /dev/null +++ b/node_modules/urix/test/index.js @@ -0,0 +1,43 @@ +// Copyright 2014 Simon Lydell +// X11 (“MIT”) Licensed. (See LICENSE.) 
+ +var path = require("path") +var assert = require("assert") +var urix = require("../") + +"use stict" + +function test(testPath, expected) { + path.sep = "\\" + assert.equal(urix(testPath), expected) + path.sep = "/" + assert.equal(urix(testPath), testPath) +} + +describe("urix", function() { + + it("is a function", function() { + assert.equal(typeof urix, "function") + }) + + + it("converts backslashes to slashes", function() { + test("a\\b\\c", "a/b/c") + test("\\a\\b\\c", "/a/b/c") + test("a/b\\c", "a/b/c") + test("\\\\a\\\\\\b///c", "//a///b///c") + }) + + + it("changes the drive letter to a slash", function() { + test("c:\\a", "/a") + test("C:\\a", "/a") + test("z:\\a", "/a") + test("c:a", "/a") + test("c:/a", "/a") + test("c:\\\\a", "//a") + test("c://a", "//a") + test("c:\\//a", "///a") + }) + +}) diff --git a/node_modules/url-parse-lax/index.js b/node_modules/url-parse-lax/index.js new file mode 100644 index 00000000..f9c0c65a --- /dev/null +++ b/node_modules/url-parse-lax/index.js @@ -0,0 +1,14 @@ +'use strict'; +var url = require('url'); +var prependHttp = require('prepend-http'); + +module.exports = function (x) { + var withProtocol = prependHttp(x); + var parsed = url.parse(withProtocol); + + if (withProtocol !== x) { + parsed.protocol = null; + } + + return parsed; +}; diff --git a/node_modules/url-parse-lax/license b/node_modules/url-parse-lax/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/url-parse-lax/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
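As a quick illustration of the branch in `index.js` above (the inputs here are hypothetical, not taken from the package's own docs): the `protocol` property is nulled only when `prepend-http` had to add one for parsing.

```js
var urlParseLax = require('url-parse-lax');

// No protocol in the input: 'http://' is prepended just for parsing,
// so the result reports protocol: null (but host/port are still parsed).
var bare = urlParseLax('example.com:8080');
console.log(bare.protocol, bare.port); // null '8080'

// The input already had a protocol, so it is kept as-is.
var full = urlParseLax('https://example.com');
console.log(full.protocol, full.host); // 'https:' 'example.com'
```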
diff --git a/node_modules/url-parse-lax/package.json b/node_modules/url-parse-lax/package.json new file mode 100644 index 00000000..110c7bbb --- /dev/null +++ b/node_modules/url-parse-lax/package.json @@ -0,0 +1,41 @@ +{ + "name": "url-parse-lax", + "version": "1.0.0", + "description": "url.parse() with support for protocol-less URLs & IPs", + "license": "MIT", + "repository": "sindresorhus/url-parse-lax", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "url", + "uri", + "parse", + "parser", + "loose", + "lax", + "protocol", + "less", + "protocol-less", + "ip", + "ipv4", + "ipv6" + ], + "dependencies": { + "prepend-http": "^1.0.1" + }, + "devDependencies": { + "ava": "0.0.4" + } +} diff --git a/node_modules/url-parse-lax/readme.md b/node_modules/url-parse-lax/readme.md new file mode 100644 index 00000000..f2639446 --- /dev/null +++ b/node_modules/url-parse-lax/readme.md @@ -0,0 +1,100 @@ +# url-parse-lax [![Build Status](https://travis-ci.org/sindresorhus/url-parse-lax.svg?branch=master)](https://travis-ci.org/sindresorhus/url-parse-lax) + +> [`url.parse()`](https://nodejs.org/docs/latest/api/url.html#url_url_parse_urlstr_parsequerystring_slashesdenotehost) with support for protocol-less URLs & IPs + + +## Install + +``` +$ npm install --save url-parse-lax +``` + + +## Usage + +```js +var urlParseLax = require('url-parse-lax'); + +urlParseLax('sindresorhus.com'); +/* +{ + protocol: null, + slashes: true, + auth: null, + host: 'sindresorhus.com', + port: null, + hostname: 'sindresorhus.com', + hash: null, + search: null, + query: null, + pathname: '/', + path: '/', + href: 'http://sindresorhus.com/' +} +*/ + +urlParseLax('[2001:db8::]:8000'); +/* +{ + protocol: null, + slashes: true, + auth: null, + host: '[2001:db8::]:8000', + port: '8000', + hostname: '2001:db8::', + hash: null, + search: null, + query: null, + pathname: '/', + path: '/', + href: 'http://[2001:db8::]:8000/' +} +*/ +``` + +And with the built-in `url.parse()`: + +```js +var url = require('url'); + +url.parse('sindresorhus.com'); +/* +{ + protocol: null, + slashes: null, + auth: null, + host: null, + port: null, + hostname: null, + hash: null, + search: null, + query: null, + pathname: 'sindresorhus', + path: 'sindresorhus', + href: 'sindresorhus' +} +*/ + +url.parse('[2001:db8::]:8000'); +/* +{ + protocol: null, + slashes: null, + auth: null, + host: null, + port: null, + hostname: null, + hash: null, + search: null, + query: null, + pathname: '[2001:db8::]:8000', + path: '[2001:db8::]:8000', + href: '[2001:db8::]:8000' +} +*/ +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/use/LICENSE b/node_modules/use/LICENSE new file mode 100644 index 00000000..7cccaf9e --- /dev/null +++ b/node_modules/use/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-present, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/use/README.md b/node_modules/use/README.md new file mode 100644 index 00000000..59b1dbd8 --- /dev/null +++ b/node_modules/use/README.md @@ -0,0 +1,90 @@ +# use [![NPM version](https://img.shields.io/npm/v/use.svg?style=flat)](https://www.npmjs.com/package/use) [![NPM monthly downloads](https://img.shields.io/npm/dm/use.svg?style=flat)](https://npmjs.org/package/use) [![NPM total downloads](https://img.shields.io/npm/dt/use.svg?style=flat)](https://npmjs.org/package/use) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/use.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/use) + +> Easily add plugin support to your node.js application. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save use +``` + +A different take on plugin handling! This is not a middleware system, if you need something that handles async middleware, [ware](https://github.com/segmentio/ware) is great for that. + +## Usage + +```js +const use = require('use'); +``` + +See the [examples folder](./examples) for usage examples. + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [base-plugins](https://www.npmjs.com/package/base-plugins): Adds 'smart plugin' support to your base application. | [homepage](https://github.com/node-base/base-plugins "Adds 'smart plugin' support to your base application.") +* [base](https://www.npmjs.com/package/base): Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks | [homepage](https://github.com/node-base/base "Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks") +* [ware](https://www.npmjs.com/package/ware): Easily create your own middleware layer. | [homepage](https://github.com/segmentio/ware "Easily create your own middleware layer.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 37 | [jonschlinkert](https://github.com/jonschlinkert) | +| 7 | [charlike-old](https://github.com/charlike-old) | +| 2 | [doowb](https://github.com/doowb) | +| 2 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on July 12, 2018._ \ No newline at end of file diff --git a/node_modules/use/index.js b/node_modules/use/index.js new file mode 100644 index 00000000..9a1eb4ee --- /dev/null +++ b/node_modules/use/index.js @@ -0,0 +1,155 @@ +/*! + * use + * + * Copyright (c) 2015-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +module.exports = function base(app, options) { + if (!isObject(app) && typeof app !== 'function') { + throw new TypeError('expected an object or function'); + } + + var opts = isObject(options) ? options : {}; + var prop = typeof opts.prop === 'string' ? opts.prop : 'fns'; + if (!Array.isArray(app[prop])) { + define(app, prop, []); + } + + /** + * Define a plugin function to be passed to use. The only + * parameter exposed to the plugin is `app`, the object or function. + * passed to `use(app)`. `app` is also exposed as `this` in plugins. + * + * Additionally, **if a plugin returns a function, the function will + * be pushed onto the `fns` array**, allowing the plugin to be + * called at a later point by the `run` method. + * + * ```js + * var use = require('use'); + * + * // define a plugin + * function foo(app) { + * // do stuff + * } + * + * var app = function(){}; + * use(app); + * + * // register plugins + * app.use(foo); + * app.use(bar); + * app.use(baz); + * ``` + * @name .use + * @param {Function} `fn` plugin function to call + * @api public + */ + + define(app, 'use', use); + + /** + * Run all plugins on `fns`. Any plugin that returns a function + * when called by `use` is pushed onto the `fns` array. + * + * ```js + * var config = {}; + * app.run(config); + * ``` + * @name .run + * @param {Object} `value` Object to be modified by plugins. 
+ * @return {Object} Returns the object passed to `run` + * @api public + */ + + define(app, 'run', function(val) { + if (!isObject(val)) return; + + if (!val.use || !val.run) { + define(val, prop, val[prop] || []); + define(val, 'use', use); + } + + if (!val[prop] || val[prop].indexOf(base) === -1) { + val.use(base); + } + + var self = this || app; + var fns = self[prop]; + var len = fns.length; + var idx = -1; + + while (++idx < len) { + val.use(fns[idx]); + } + return val; + }); + + /** + * Call plugin `fn`. If a function is returned push it into the + * `fns` array to be called by the `run` method. + */ + + function use(type, fn, options) { + var offset = 1; + + if (typeof type === 'string' || Array.isArray(type)) { + fn = wrap(type, fn); + offset++; + } else { + options = fn; + fn = type; + } + + if (typeof fn !== 'function') { + throw new TypeError('expected a function'); + } + + var self = this || app; + var fns = self[prop]; + + var args = [].slice.call(arguments, offset); + args.unshift(self); + + if (typeof opts.hook === 'function') { + opts.hook.apply(self, args); + } + + var val = fn.apply(self, args); + if (typeof val === 'function' && fns.indexOf(val) === -1) { + fns.push(val); + } + return self; + } + + /** + * Wrap a named plugin function so that it's only called on objects of the + * given `type` + * + * @param {String} `type` + * @param {Function} `fn` Plugin function + * @return {Function} + */ + + function wrap(type, fn) { + return function plugin() { + return this.type === type ? fn.apply(this, arguments) : plugin; + }; + } + + return app; +}; + +function isObject(val) { + return val && typeof val === 'object' && !Array.isArray(val); +} + +function define(obj, key, val) { + Object.defineProperty(obj, key, { + configurable: true, + writable: true, + value: val + }); +} diff --git a/node_modules/use/package.json b/node_modules/use/package.json new file mode 100644 index 00000000..b7a3f9a1 --- /dev/null +++ b/node_modules/use/package.json @@ -0,0 +1,66 @@ +{ + "name": "use", + "description": "Easily add plugin support to your node.js application.", + "version": "3.1.1", + "homepage": "https://github.com/jonschlinkert/use", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Olsten Larck (https://i.am.charlike.online)", + "(https://github.com/wtgtybhertgeghgtwtg)" + ], + "repository": "jonschlinkert/use", + "bugs": { + "url": "https://github.com/jonschlinkert/use/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "base-plugins": "^1.0.0", + "define-property": "^2.0.0", + "extend-shallow": "^3.0.1", + "gulp": "^3.9.1", + "gulp-eslint": "^4.0.0", + "gulp-format-md": "^1.0.0", + "gulp-istanbul": "^1.1.2", + "gulp-mocha": "^3.0.1", + "mocha": "^4.0.1" + }, + "keywords": [ + "use" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "base", + "base-plugins", + "ware" + ] + }, + "reflinks": [ + "verb", + "ware" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/util-deprecate/History.md b/node_modules/util-deprecate/History.md new file mode 100644 index 00000000..acc86753 --- /dev/null +++ b/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + +1.0.2 / 2015-10-07 
+================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use `console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/node_modules/util-deprecate/LICENSE b/node_modules/util-deprecate/LICENSE new file mode 100644 index 00000000..6a60e8c2 --- /dev/null +++ b/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/README.md b/node_modules/util-deprecate/README.md new file mode 100644 index 00000000..75622fa7 --- /dev/null +++ b/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. + + +## API + +A `deprecate()` function is the only thing exposed by this module. + +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
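A self-contained variant of the setup snippet in the README above (the function names here are made up for illustration): wrap the old entry point once, and every caller gets a single warning.

```js
var deprecate = require('util-deprecate');

function oldSum(a, b) {
  return a + b;
}

// Callers keep working, but the first call prints the message
// (via util.deprecate() in Node, console.warn() in the browser build).
var sum = deprecate(oldSum, 'oldSum() is deprecated, use add() instead');

sum(1, 2); // warns once, returns 3
sum(3, 4); // no further warning, returns 7
```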
diff --git a/node_modules/util-deprecate/browser.js b/node_modules/util-deprecate/browser.js new file mode 100644 index 00000000..549ae2f0 --- /dev/null +++ b/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. + * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. + * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. + * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/node_modules/util-deprecate/node.js b/node_modules/util-deprecate/node.js new file mode 100644 index 00000000..5e6fcff5 --- /dev/null +++ b/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. 
+ */ + +module.exports = require('util').deprecate; diff --git a/node_modules/util-deprecate/package.json b/node_modules/util-deprecate/package.json new file mode 100644 index 00000000..2e79f89a --- /dev/null +++ b/node_modules/util-deprecate/package.json @@ -0,0 +1,27 @@ +{ + "name": "util-deprecate", + "version": "1.0.2", + "description": "The Node.js `util.deprecate()` function with browser support", + "main": "node.js", + "browser": "browser.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "keywords": [ + "util", + "deprecate", + "browserify", + "browser", + "node" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" + }, + "homepage": "https://github.com/TooTallNate/util-deprecate" +} diff --git a/node_modules/utils-merge/.npmignore b/node_modules/utils-merge/.npmignore new file mode 100644 index 00000000..3e538441 --- /dev/null +++ b/node_modules/utils-merge/.npmignore @@ -0,0 +1,9 @@ +CONTRIBUTING.md +Makefile +docs/ +examples/ +reports/ +test/ + +.jshintrc +.travis.yml diff --git a/node_modules/utils-merge/LICENSE b/node_modules/utils-merge/LICENSE new file mode 100644 index 00000000..76f6d083 --- /dev/null +++ b/node_modules/utils-merge/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013-2017 Jared Hanson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/utils-merge/README.md b/node_modules/utils-merge/README.md new file mode 100644 index 00000000..0cb71171 --- /dev/null +++ b/node_modules/utils-merge/README.md @@ -0,0 +1,34 @@ +# utils-merge + +[![Version](https://img.shields.io/npm/v/utils-merge.svg?label=version)](https://www.npmjs.com/package/utils-merge) +[![Build](https://img.shields.io/travis/jaredhanson/utils-merge.svg)](https://travis-ci.org/jaredhanson/utils-merge) +[![Quality](https://img.shields.io/codeclimate/github/jaredhanson/utils-merge.svg?label=quality)](https://codeclimate.com/github/jaredhanson/utils-merge) +[![Coverage](https://img.shields.io/coveralls/jaredhanson/utils-merge.svg)](https://coveralls.io/r/jaredhanson/utils-merge) +[![Dependencies](https://img.shields.io/david/jaredhanson/utils-merge.svg)](https://david-dm.org/jaredhanson/utils-merge) + + +Merges the properties from a source object into a destination object. 
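As a quick preview of the behaviour shown in the usage section below (a hedged sketch; the variable names are illustrative): keys from the source overwrite keys that already exist on the destination, and the mutated destination object is returned.

```js
var merge = require('utils-merge');

var defaults = { port: 3000, verbose: false };
var overrides = { verbose: true };

var config = merge(defaults, overrides);

console.log(config);              // { port: 3000, verbose: true }
console.log(config === defaults); // true (the destination object is mutated in place)
```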
+ +## Install + +```bash +$ npm install utils-merge +``` + +## Usage + +```javascript +var a = { foo: 'bar' } + , b = { bar: 'baz' }; + +merge(a, b); +// => { foo: 'bar', bar: 'baz' } +``` + +## License + +[The MIT License](http://opensource.org/licenses/MIT) + +Copyright (c) 2013-2017 Jared Hanson <[http://jaredhanson.net/](http://jaredhanson.net/)> + + Sponsor diff --git a/node_modules/utils-merge/index.js b/node_modules/utils-merge/index.js new file mode 100644 index 00000000..4265c694 --- /dev/null +++ b/node_modules/utils-merge/index.js @@ -0,0 +1,23 @@ +/** + * Merge object b with object a. + * + * var a = { foo: 'bar' } + * , b = { bar: 'baz' }; + * + * merge(a, b); + * // => { foo: 'bar', bar: 'baz' } + * + * @param {Object} a + * @param {Object} b + * @return {Object} + * @api public + */ + +exports = module.exports = function(a, b){ + if (a && b) { + for (var key in b) { + a[key] = b[key]; + } + } + return a; +}; diff --git a/node_modules/utils-merge/package.json b/node_modules/utils-merge/package.json new file mode 100644 index 00000000..e36b0781 --- /dev/null +++ b/node_modules/utils-merge/package.json @@ -0,0 +1,40 @@ +{ + "name": "utils-merge", + "version": "1.0.1", + "description": "merge() utility function", + "keywords": [ + "util" + ], + "author": { + "name": "Jared Hanson", + "email": "jaredhanson@gmail.com", + "url": "http://www.jaredhanson.net/" + }, + "repository": { + "type": "git", + "url": "git://github.com/jaredhanson/utils-merge.git" + }, + "bugs": { + "url": "http://github.com/jaredhanson/utils-merge/issues" + }, + "license": "MIT", + "licenses": [ + { + "type": "MIT", + "url": "http://opensource.org/licenses/MIT" + } + ], + "main": "./index", + "dependencies": {}, + "devDependencies": { + "make-node": "0.3.x", + "mocha": "1.x.x", + "chai": "1.x.x" + }, + "engines": { + "node": ">= 0.4.0" + }, + "scripts": { + "test": "node_modules/.bin/mocha --reporter spec --require test/bootstrap/node test/*.test.js" + } +} diff --git a/node_modules/vary/HISTORY.md b/node_modules/vary/HISTORY.md new file mode 100644 index 00000000..f6cbcf7f --- /dev/null +++ b/node_modules/vary/HISTORY.md @@ -0,0 +1,39 @@ +1.1.2 / 2017-09-23 +================== + + * perf: improve header token parsing speed + +1.1.1 / 2017-03-20 +================== + + * perf: hoist regular expression + +1.1.0 / 2015-09-29 +================== + + * Only accept valid field names in the `field` argument + - Ensures the resulting string is a valid HTTP header value + +1.0.1 / 2015-07-08 +================== + + * Fix setting empty header from empty `field` + * perf: enable strict mode + * perf: remove argument reassignments + +1.0.0 / 2014-08-10 +================== + + * Accept valid `Vary` header string as `field` + * Add `vary.append` for low-level string manipulation + * Move to `jshttp` orgainzation + +0.1.0 / 2014-06-05 +================== + + * Support array of fields to set + +0.0.0 / 2014-06-04 +================== + + * Initial release diff --git a/node_modules/vary/LICENSE b/node_modules/vary/LICENSE new file mode 100644 index 00000000..84441fbb --- /dev/null +++ b/node_modules/vary/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014-2017 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the 
Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/vary/README.md b/node_modules/vary/README.md new file mode 100644 index 00000000..cc000b34 --- /dev/null +++ b/node_modules/vary/README.md @@ -0,0 +1,101 @@ +# vary + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Manipulate the HTTP Vary header + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install vary +``` + +## API + + + +```js +var vary = require('vary') +``` + +### vary(res, field) + +Adds the given header `field` to the `Vary` response header of `res`. +This can be a string of a single field, a string of a valid `Vary` +header, or an array of multiple fields. + +This will append the header if not already listed, otherwise leaves +it listed in the current location. + + + +```js +// Append "Origin" to the Vary header of the response +vary(res, 'Origin') +``` + +### vary.append(header, field) + +Adds the given header `field` to the `Vary` response header string `header`. +This can be a string of a single field, a string of a valid `Vary` header, +or an array of multiple fields. + +This will append the header if not already listed, otherwise leaves +it listed in the current location. The new header string is returned. + + + +```js +// Get header string appending "Origin" to "Accept, User-Agent" +vary.append('Accept, User-Agent', 'Origin') +``` + +## Examples + +### Updating the Vary header when content is based on it + +```js +var http = require('http') +var vary = require('vary') + +http.createServer(function onRequest (req, res) { + // about to user-agent sniff + vary(res, 'User-Agent') + + var ua = req.headers['user-agent'] || '' + var isMobile = /mobi|android|touch|mini/i.test(ua) + + // serve site, depending on isMobile + res.setHeader('Content-Type', 'text/html') + res.end('You are (probably) ' + (isMobile ? 
'' : 'not ') + 'a mobile user') +}) +``` + +## Testing + +```sh +$ npm test +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/vary.svg +[npm-url]: https://npmjs.org/package/vary +[node-version-image]: https://img.shields.io/node/v/vary.svg +[node-version-url]: https://nodejs.org/en/download +[travis-image]: https://img.shields.io/travis/jshttp/vary/master.svg +[travis-url]: https://travis-ci.org/jshttp/vary +[coveralls-image]: https://img.shields.io/coveralls/jshttp/vary/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/vary +[downloads-image]: https://img.shields.io/npm/dm/vary.svg +[downloads-url]: https://npmjs.org/package/vary diff --git a/node_modules/vary/index.js b/node_modules/vary/index.js new file mode 100644 index 00000000..5b5e7412 --- /dev/null +++ b/node_modules/vary/index.js @@ -0,0 +1,149 @@ +/*! + * vary + * Copyright(c) 2014-2017 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + */ + +module.exports = vary +module.exports.append = append + +/** + * RegExp to match field-name in RFC 7230 sec 3.2 + * + * field-name = token + * token = 1*tchar + * tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + * / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + * / DIGIT / ALPHA + * ; any VCHAR, except delimiters + */ + +var FIELD_NAME_REGEXP = /^[!#$%&'*+\-.^_`|~0-9A-Za-z]+$/ + +/** + * Append a field to a vary header. + * + * @param {String} header + * @param {String|Array} field + * @return {String} + * @public + */ + +function append (header, field) { + if (typeof header !== 'string') { + throw new TypeError('header argument is required') + } + + if (!field) { + throw new TypeError('field argument is required') + } + + // get fields array + var fields = !Array.isArray(field) + ? parse(String(field)) + : field + + // assert on invalid field names + for (var j = 0; j < fields.length; j++) { + if (!FIELD_NAME_REGEXP.test(fields[j])) { + throw new TypeError('field argument contains an invalid header name') + } + } + + // existing, unspecified vary + if (header === '*') { + return header + } + + // enumerate current values + var val = header + var vals = parse(header.toLowerCase()) + + // unspecified vary + if (fields.indexOf('*') !== -1 || vals.indexOf('*') !== -1) { + return '*' + } + + for (var i = 0; i < fields.length; i++) { + var fld = fields[i].toLowerCase() + + // append value (case-preserving) + if (vals.indexOf(fld) === -1) { + vals.push(fld) + val = val + ? val + ', ' + fields[i] + : fields[i] + } + } + + return val +} + +/** + * Parse a vary header into an array. + * + * @param {String} header + * @return {Array} + * @private + */ + +function parse (header) { + var end = 0 + var list = [] + var start = 0 + + // gather tokens + for (var i = 0, len = header.length; i < len; i++) { + switch (header.charCodeAt(i)) { + case 0x20: /* */ + if (start === end) { + start = end = i + 1 + } + break + case 0x2c: /* , */ + list.push(header.substring(start, end)) + start = end = i + 1 + break + default: + end = i + 1 + break + } + } + + // final token + list.push(header.substring(start, end)) + + return list +} + +/** + * Mark that a request is varied on a header field. + * + * @param {Object} res + * @param {String|Array} field + * @public + */ + +function vary (res, field) { + if (!res || !res.getHeader || !res.setHeader) { + // quack quack + throw new TypeError('res argument is required') + } + + // get existing header + var val = res.getHeader('Vary') || '' + var header = Array.isArray(val) + ? 
val.join(', ') + : String(val) + + // set new header + if ((val = append(header, field))) { + res.setHeader('Vary', val) + } +} diff --git a/node_modules/vary/package.json b/node_modules/vary/package.json new file mode 100644 index 00000000..028f72a9 --- /dev/null +++ b/node_modules/vary/package.json @@ -0,0 +1,43 @@ +{ + "name": "vary", + "description": "Manipulate the HTTP Vary header", + "version": "1.1.2", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "keywords": [ + "http", + "res", + "vary" + ], + "repository": "jshttp/vary", + "devDependencies": { + "beautify-benchmark": "0.2.4", + "benchmark": "2.1.4", + "eslint": "3.19.0", + "eslint-config-standard": "10.2.1", + "eslint-plugin-import": "2.7.0", + "eslint-plugin-markdown": "1.0.0-beta.6", + "eslint-plugin-node": "5.1.1", + "eslint-plugin-promise": "3.5.0", + "eslint-plugin-standard": "3.0.1", + "istanbul": "0.4.5", + "mocha": "2.5.3", + "supertest": "1.1.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "bench": "node benchmark/index.js", + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/which/CHANGELOG.md b/node_modules/which/CHANGELOG.md new file mode 100644 index 00000000..3d83d269 --- /dev/null +++ b/node_modules/which/CHANGELOG.md @@ -0,0 +1,152 @@ +# Changes + + +## 1.3.1 + +* update deps +* update travis + +## v1.3.0 + +* Add nothrow option to which.sync +* update tap + +## v1.2.14 + +* appveyor: drop node 5 and 0.x +* travis-ci: add node 6, drop 0.x + +## v1.2.13 + +* test: Pass missing option to pass on windows +* update tap +* update isexe to 2.0.0 +* neveragain.tech pledge request + +## v1.2.12 + +* Removed unused require + +## v1.2.11 + +* Prevent changelog script from being included in package + +## v1.2.10 + +* Use env.PATH only, not env.Path + +## v1.2.9 + +* fix for paths starting with ../ +* Remove unused `is-absolute` module + +## v1.2.8 + +* bullet items in changelog that contain (but don't start with) # + +## v1.2.7 + +* strip 'update changelog' changelog entries out of changelog + +## v1.2.6 + +* make the changelog bulleted + +## v1.2.5 + +* make a changelog, and keep it up to date +* don't include tests in package +* Properly handle relative-path executables +* appveyor +* Attach error code to Not Found error +* Make tests pass on Windows + +## v1.2.4 + +* Fix typo + +## v1.2.3 + +* update isexe, fix regression in pathExt handling + +## v1.2.2 + +* update deps, use isexe module, test windows + +## v1.2.1 + +* Sometimes windows PATH entries are quoted +* Fixed a bug in the check for group and user mode bits. This bug was introduced during refactoring for supporting strict mode. 
+* doc cli + +## v1.2.0 + +* Add support for opt.all and -as cli flags +* test the bin +* update travis +* Allow checking for multiple programs in bin/which +* tap 2 + +## v1.1.2 + +* travis +* Refactored and fixed undefined error on Windows +* Support strict mode + +## v1.1.1 + +* test +g exes against secondary groups, if available +* Use windows exe semantics on cygwin & msys +* cwd should be first in path on win32, not last +* Handle lower-case 'env.Path' on Windows +* Update docs +* use single-quotes + +## v1.1.0 + +* Add tests, depend on is-absolute + +## v1.0.9 + +* which.js: root is allowed to execute files owned by anyone + +## v1.0.8 + +* don't use graceful-fs + +## v1.0.7 + +* add license to package.json + +## v1.0.6 + +* isc license + +## 1.0.5 + +* Awful typo + +## 1.0.4 + +* Test for path absoluteness properly +* win: Allow '' as a pathext if cmd has a . in it + +## 1.0.3 + +* Remove references to execPath +* Make `which.sync()` work on Windows by honoring the PATHEXT variable. +* Make `isExe()` always return true on Windows. +* MIT + +## 1.0.2 + +* Only files can be exes + +## 1.0.1 + +* Respect the PATHEXT env for win32 support +* should 0755 the bin +* binary +* guts +* package +* 1st diff --git a/node_modules/which/LICENSE b/node_modules/which/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/which/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/which/README.md b/node_modules/which/README.md new file mode 100644 index 00000000..8c0b0cbf --- /dev/null +++ b/node_modules/which/README.md @@ -0,0 +1,51 @@ +# which + +Like the unix `which` utility. + +Finds the first instance of a specified executable in the PATH +environment variable. Does not cache the results, so `hash -r` is not +needed when the PATH changes. + +## USAGE + +```javascript +var which = require('which') + +// async usage +which('node', function (er, resolvedPath) { + // er is returned if no "node" is found on the PATH + // if it is found, then the absolute path to the exec is returned +}) + +// sync usage +// throws if not found +var resolved = which.sync('node') + +// if nothrow option is used, returns null if not found +resolved = which.sync('node', {nothrow: true}) + +// Pass options to override the PATH and PATHEXT environment vars. +which('node', { path: someOtherPath }, function (er, resolved) { + if (er) + throw er + console.log('found at %j', resolved) +}) +``` + +## CLI USAGE + +Same as the BSD `which(1)` binary. + +``` +usage: which [-as] program ... +``` + +## OPTIONS + +You may pass an options object as the second argument. + +- `path`: Use instead of the `PATH` environment variable. +- `pathExt`: Use instead of the `PATHEXT` environment variable. 
+- `all`: Return all matches, instead of just the first one. Note that + this means the function returns an array of strings instead of a + single string. diff --git a/node_modules/which/bin/which b/node_modules/which/bin/which new file mode 100755 index 00000000..7cee3729 --- /dev/null +++ b/node_modules/which/bin/which @@ -0,0 +1,52 @@ +#!/usr/bin/env node +var which = require("../") +if (process.argv.length < 3) + usage() + +function usage () { + console.error('usage: which [-as] program ...') + process.exit(1) +} + +var all = false +var silent = false +var dashdash = false +var args = process.argv.slice(2).filter(function (arg) { + if (dashdash || !/^-/.test(arg)) + return true + + if (arg === '--') { + dashdash = true + return false + } + + var flags = arg.substr(1).split('') + for (var f = 0; f < flags.length; f++) { + var flag = flags[f] + switch (flag) { + case 's': + silent = true + break + case 'a': + all = true + break + default: + console.error('which: illegal option -- ' + flag) + usage() + } + } + return false +}) + +process.exit(args.reduce(function (pv, current) { + try { + var f = which.sync(current, { all: all }) + if (all) + f = f.join('\n') + if (!silent) + console.log(f) + return pv; + } catch (e) { + return 1; + } +}, 0)) diff --git a/node_modules/which/package.json b/node_modules/which/package.json new file mode 100644 index 00000000..51be376f --- /dev/null +++ b/node_modules/which/package.json @@ -0,0 +1,30 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "which", + "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.", + "version": "1.3.1", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-which.git" + }, + "main": "which.js", + "bin": "./bin/which", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "devDependencies": { + "mkdirp": "^0.5.0", + "rimraf": "^2.6.2", + "tap": "^12.0.1" + }, + "scripts": { + "test": "tap test/*.js --cov", + "changelog": "bash gen-changelog.sh", + "postversion": "npm run changelog && git add CHANGELOG.md && git commit -m 'update changelog - '${npm_package_version}" + }, + "files": [ + "which.js", + "bin/which" + ] +} diff --git a/node_modules/which/which.js b/node_modules/which/which.js new file mode 100644 index 00000000..4347f91a --- /dev/null +++ b/node_modules/which/which.js @@ -0,0 +1,135 @@ +module.exports = which +which.sync = whichSync + +var isWindows = process.platform === 'win32' || + process.env.OSTYPE === 'cygwin' || + process.env.OSTYPE === 'msys' + +var path = require('path') +var COLON = isWindows ? ';' : ':' +var isexe = require('isexe') + +function getNotFoundError (cmd) { + var er = new Error('not found: ' + cmd) + er.code = 'ENOENT' + + return er +} + +function getPathInfo (cmd, opt) { + var colon = opt.colon || COLON + var pathEnv = opt.path || process.env.PATH || '' + var pathExt = [''] + + pathEnv = pathEnv.split(colon) + + var pathExtExe = '' + if (isWindows) { + pathEnv.unshift(process.cwd()) + pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM') + pathExt = pathExtExe.split(colon) + + + // Always test the cmd itself first. isexe will check to make sure + // it's found in the pathExt set. + if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') + pathExt.unshift('') + } + + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. 
+ if (cmd.match(/\//) || isWindows && cmd.match(/\\/)) + pathEnv = [''] + + return { + env: pathEnv, + ext: pathExt, + extExe: pathExtExe + } +} + +function which (cmd, opt, cb) { + if (typeof opt === 'function') { + cb = opt + opt = {} + } + + var info = getPathInfo(cmd, opt) + var pathEnv = info.env + var pathExt = info.ext + var pathExtExe = info.extExe + var found = [] + + ;(function F (i, l) { + if (i === l) { + if (opt.all && found.length) + return cb(null, found) + else + return cb(getNotFoundError(cmd)) + } + + var pathPart = pathEnv[i] + if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') + pathPart = pathPart.slice(1, -1) + + var p = path.join(pathPart, cmd) + if (!pathPart && (/^\.[\\\/]/).test(cmd)) { + p = cmd.slice(0, 2) + p + } + ;(function E (ii, ll) { + if (ii === ll) return F(i + 1, l) + var ext = pathExt[ii] + isexe(p + ext, { pathExt: pathExtExe }, function (er, is) { + if (!er && is) { + if (opt.all) + found.push(p + ext) + else + return cb(null, p + ext) + } + return E(ii + 1, ll) + }) + })(0, pathExt.length) + })(0, pathEnv.length) +} + +function whichSync (cmd, opt) { + opt = opt || {} + + var info = getPathInfo(cmd, opt) + var pathEnv = info.env + var pathExt = info.ext + var pathExtExe = info.extExe + var found = [] + + for (var i = 0, l = pathEnv.length; i < l; i ++) { + var pathPart = pathEnv[i] + if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') + pathPart = pathPart.slice(1, -1) + + var p = path.join(pathPart, cmd) + if (!pathPart && /^\.[\\\/]/.test(cmd)) { + p = cmd.slice(0, 2) + p + } + for (var j = 0, ll = pathExt.length; j < ll; j ++) { + var cur = p + pathExt[j] + var is + try { + is = isexe.sync(cur, { pathExt: pathExtExe }) + if (is) { + if (opt.all) + found.push(cur) + else + return cur + } + } catch (ex) {} + } + } + + if (opt.all && found.length) + return found + + if (opt.nothrow) + return null + + throw getNotFoundError(cmd) +} diff --git a/node_modules/wide-align/LICENSE b/node_modules/wide-align/LICENSE new file mode 100644 index 00000000..f4be44d8 --- /dev/null +++ b/node_modules/wide-align/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/wide-align/README.md b/node_modules/wide-align/README.md new file mode 100644 index 00000000..32f1be04 --- /dev/null +++ b/node_modules/wide-align/README.md @@ -0,0 +1,47 @@ +wide-align +---------- + +A wide-character aware text alignment function for use in terminals / on the +console. + +### Usage + +``` +var align = require('wide-align') + +// Note that if you view this on a unicode console, all of the slashes are +// aligned. This is because on a console, all narrow characters are +// an en wide and all wide characters are an em. 
In browsers, this isn't +// held to and wide characters like "古" can be less than two narrow +// characters even with a fixed width font. + +console.log(align.center('abc', 10)) // ' abc ' +console.log(align.center('古古古', 10)) // ' 古古古 ' +console.log(align.left('abc', 10)) // 'abc ' +console.log(align.left('古古古', 10)) // '古古古 ' +console.log(align.right('abc', 10)) // ' abc' +console.log(align.right('古古古', 10)) // ' 古古古' +``` + +### Functions + +#### `align.center(str, length)` → `str` + +Returns *str* with spaces added to both sides such that that it is *length* +chars long and centered in the spaces. + +#### `align.left(str, length)` → `str` + +Returns *str* with spaces to the right such that it is *length* chars long. + +### `align.right(str, length)` → `str` + +Returns *str* with spaces to the left such that it is *length* chars long. + +### Origins + +These functions were originally taken from +[cliui](https://npmjs.com/package/cliui). Changes include switching to the +MUCH faster pad generation function from +[lodash](https://npmjs.com/package/lodash), making center alignment pad +both sides and adding left alignment. diff --git a/node_modules/wide-align/align.js b/node_modules/wide-align/align.js new file mode 100644 index 00000000..4f94ca4c --- /dev/null +++ b/node_modules/wide-align/align.js @@ -0,0 +1,65 @@ +'use strict' +var stringWidth = require('string-width') + +exports.center = alignCenter +exports.left = alignLeft +exports.right = alignRight + +// lodash's way of generating pad characters. + +function createPadding (width) { + var result = '' + var string = ' ' + var n = width + do { + if (n % 2) { + result += string; + } + n = Math.floor(n / 2); + string += string; + } while (n); + + return result; +} + +function alignLeft (str, width) { + var trimmed = str.trimRight() + if (trimmed.length === 0 && str.length >= width) return str + var padding = '' + var strWidth = stringWidth(trimmed) + + if (strWidth < width) { + padding = createPadding(width - strWidth) + } + + return trimmed + padding +} + +function alignRight (str, width) { + var trimmed = str.trimLeft() + if (trimmed.length === 0 && str.length >= width) return str + var padding = '' + var strWidth = stringWidth(trimmed) + + if (strWidth < width) { + padding = createPadding(width - strWidth) + } + + return padding + trimmed +} + +function alignCenter (str, width) { + var trimmed = str.trim() + if (trimmed.length === 0 && str.length >= width) return str + var padLeft = '' + var padRight = '' + var strWidth = stringWidth(trimmed) + + if (strWidth < width) { + var padLeftBy = parseInt((width - strWidth) / 2, 10) + padLeft = createPadding(padLeftBy) + padRight = createPadding(width - (strWidth + padLeftBy)) + } + + return padLeft + trimmed + padRight +} diff --git a/node_modules/wide-align/package.json b/node_modules/wide-align/package.json new file mode 100644 index 00000000..1c31f998 --- /dev/null +++ b/node_modules/wide-align/package.json @@ -0,0 +1,33 @@ +{ + "name": "wide-align", + "version": "1.1.3", + "description": "A wide-character aware text alignment function for use on the console or with fixed width fonts.", + "main": "align.js", + "scripts": { + "test": "tap --coverage test/*.js", + "version": "perl -pi -e 's/^( \"version\": $ENV{npm_config_node_version}\").*?\",/$1abc\",/' package-lock.json ; git add package-lock.json" + }, + "keywords": [ + "wide", + "double", + "unicode", + "cjkv", + "pad", + "align" + ], + "author": "Rebecca Turner (http://re-becca.org/)", + "license": "ISC", + "repository": { + "type": 
"git", + "url": "https://github.com/iarna/wide-align" + }, + "dependencies": { + "string-width": "^1.0.2 || 2" + }, + "devDependencies": { + "tap": "10 || 11 || 12" + }, + "files": [ + "align.js" + ] +} diff --git a/node_modules/widest-line/index.js b/node_modules/widest-line/index.js new file mode 100644 index 00000000..a9865d00 --- /dev/null +++ b/node_modules/widest-line/index.js @@ -0,0 +1,8 @@ +'use strict'; +const stringWidth = require('string-width'); + +module.exports = input => { + let max = 0; + for (const s of input.split('\n')) max = Math.max(max, stringWidth(s)); + return max; +}; diff --git a/node_modules/widest-line/license b/node_modules/widest-line/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/widest-line/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/widest-line/package.json b/node_modules/widest-line/package.json new file mode 100644 index 00000000..36e155b2 --- /dev/null +++ b/node_modules/widest-line/package.json @@ -0,0 +1,54 @@ +{ + "name": "widest-line", + "version": "2.0.1", + "description": "Get the visual width of the widest line in a string - the number of columns required to display it", + "license": "MIT", + "repository": "sindresorhus/widest-line", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "string", + "str", + "character", + "char", + "unicode", + "width", + "visual", + "column", + "columns", + "fullwidth", + "full-width", + "full", + "ansi", + "escape", + "codes", + "cli", + "command-line", + "terminal", + "console", + "cjk", + "chinese", + "japanese", + "korean", + "fixed-width" + ], + "dependencies": { + "string-width": "^2.1.1" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/widest-line/readme.md b/node_modules/widest-line/readme.md new file mode 100644 index 00000000..20e02d57 --- /dev/null +++ b/node_modules/widest-line/readme.md @@ -0,0 +1,34 @@ +# widest-line [![Build Status](https://travis-ci.org/sindresorhus/widest-line.svg?branch=master)](https://travis-ci.org/sindresorhus/widest-line) + +> Get the visual width of the widest line in a string - the number of columns required to display it + +Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and doesn't affect the width. + +Useful to be able to know the maximum width a string will take up in the terminal. + + +## Install + +``` +$ npm install widest-line +``` + + +## Usage + +```js +const widestLine = require('widest-line'); + +widestLine('古\n\u001B[1m@\u001B[22m'); +//=> 2 +``` + + +## Related + +- [string-width](https://github.com/sindresorhus/string-width) - Get the visual width of a string + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/wrappy/LICENSE b/node_modules/wrappy/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/wrappy/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/wrappy/README.md b/node_modules/wrappy/README.md new file mode 100644 index 00000000..98eab252 --- /dev/null +++ b/node_modules/wrappy/README.md @@ -0,0 +1,36 @@ +# wrappy + +Callback wrapping utility + +## USAGE + +```javascript +var wrappy = require("wrappy") + +// var wrapper = wrappy(wrapperFunction) + +// make sure a cb is called only once +// See also: http://npm.im/once for this specific use case +var once = wrappy(function (cb) { + var called = false + return function () { + if (called) return + called = true + return cb.apply(this, arguments) + } +}) + +function printBoo () { + console.log('boo') +} +// has some rando property +printBoo.iAmBooPrinter = true + +var onlyPrintOnce = once(printBoo) + +onlyPrintOnce() // prints 'boo' +onlyPrintOnce() // does nothing + +// random property is retained! +assert.equal(onlyPrintOnce.iAmBooPrinter, true) +``` diff --git a/node_modules/wrappy/package.json b/node_modules/wrappy/package.json new file mode 100644 index 00000000..13075204 --- /dev/null +++ b/node_modules/wrappy/package.json @@ -0,0 +1,29 @@ +{ + "name": "wrappy", + "version": "1.0.2", + "description": "Callback wrapping utility", + "main": "wrappy.js", + "files": [ + "wrappy.js" + ], + "directories": { + "test": "test" + }, + "dependencies": {}, + "devDependencies": { + "tap": "^2.3.1" + }, + "scripts": { + "test": "tap --coverage test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/wrappy" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/wrappy/issues" + }, + "homepage": "https://github.com/npm/wrappy" +} diff --git a/node_modules/wrappy/wrappy.js b/node_modules/wrappy/wrappy.js new file mode 100644 index 00000000..bb7e7d6f --- /dev/null +++ b/node_modules/wrappy/wrappy.js @@ -0,0 +1,33 @@ +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} diff --git a/node_modules/write-file-atomic/CHANGELOG.md b/node_modules/write-file-atomic/CHANGELOG.md new file mode 100644 index 00000000..920ae2cb --- /dev/null +++ b/node_modules/write-file-atomic/CHANGELOG.md @@ -0,0 +1,25 @@ +# 2.4.3 + +* Ignore errors raised by `fs.closeSync` when cleaning up after a write + error. + +# 2.4.2 + +* A pair of patches to fix some fd leaks. We would leak fds with sync use + when errors occured and with async use any time fsync was not in use. (#34) + +# 2.4.1 + +* Fix a bug where `signal-exit` instances would be leaked. This was fixed when addressing #35. + +# 2.4.0 + +## Features + +* Allow chown and mode options to be set to false to disable the defaulting behavior. (#20) +* Support passing encoding strings in options slot for compat with Node.js API. 
(#31) +* Add support for running inside of worker threads (#37) + +## Fixes + +* Remove unneeded call when returning success (#36) diff --git a/node_modules/write-file-atomic/LICENSE b/node_modules/write-file-atomic/LICENSE new file mode 100644 index 00000000..95e65a77 --- /dev/null +++ b/node_modules/write-file-atomic/LICENSE @@ -0,0 +1,6 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/write-file-atomic/README.md b/node_modules/write-file-atomic/README.md new file mode 100644 index 00000000..ca28e99a --- /dev/null +++ b/node_modules/write-file-atomic/README.md @@ -0,0 +1,56 @@ +write-file-atomic +----------------- + +This is an extension for node's `fs.writeFile` that makes its operation +atomic and allows you to set ownership (uid/gid of the file). + +### var writeFileAtomic = require('write-file-atomic')
writeFileAtomic(filename, data, [options], callback) + +* filename **String** +* data **String** | **Buffer** +* options **Object** | **String** + * chown **Object** default, uid & gid of existing file, if any + * uid **Number** + * gid **Number** + * encoding **String** | **Null** default = 'utf8' + * fsync **Boolean** default = true + * mode **Number** default, from existing file, if any + * Promise **Object** default = native Promise object +* callback **Function** + +Atomically and asynchronously writes data to a file, replacing the file if it already +exists. data can be a string or a buffer. + +The file is initially named `filename + "." + murmurhex(__filename, process.pid, ++invocations)`. +Note that `require('worker_threads').threadId` is used in addition to `process.pid` if running inside of a worker thread. +If writeFile completes successfully then, if passed the **chown** option, it will change +the ownership of the file. Finally it renames the file back to the filename you specified. If +it encounters errors at any of these steps it will attempt to unlink the temporary file and then +pass the error back to the caller. +If multiple writes are concurrently issued to the same file, the write operations are put into a queue and serialized in the order they were called, using Promises. Native promises are used by default, but you can inject your own promise-like object with the **Promise** option. Writes to different files are still executed in parallel. + +If provided, the **chown** option requires both **uid** and **gid** properties or else +you'll get an error. If **chown** is not specified it will default to using +the owner of the previous file. To prevent chown from being run you can +also pass `false`, in which case the file will be created with the current user's credentials. + +If **mode** is not specified, it will default to using the permissions from +an existing file, if any. Explicitly setting this to `false` removes this default, resulting +in a file created with the system default permissions. + +If options is a String, it's assumed to be the **encoding** option. The **encoding** option is ignored if **data** is a buffer. It defaults to 'utf8'. + +If the **fsync** option is **false**, writeFile will skip the final fsync call. + +Example: + +```javascript +writeFileAtomic('message.txt', 'Hello Node', {chown:{uid:100,gid:50}}, function (err) { + if (err) throw err; + console.log('It\'s saved!'); +}); +``` + +### var writeFileAtomicSync = require('write-file-atomic').sync
writeFileAtomicSync(filename, data, [options]) + +The synchronous version of **writeFileAtomic**. diff --git a/node_modules/write-file-atomic/index.js b/node_modules/write-file-atomic/index.js new file mode 100644 index 00000000..64ae987c --- /dev/null +++ b/node_modules/write-file-atomic/index.js @@ -0,0 +1,238 @@ +'use strict' +module.exports = writeFile +module.exports.sync = writeFileSync +module.exports._getTmpname = getTmpname // for testing +module.exports._cleanupOnExit = cleanupOnExit + +var fs = require('graceful-fs') +var MurmurHash3 = require('imurmurhash') +var onExit = require('signal-exit') +var path = require('path') +var activeFiles = {} + +// if we run inside of a worker_thread, `process.pid` is not unique +/* istanbul ignore next */ +var threadId = (function getId () { + try { + var workerThreads = require('worker_threads') + + /// if we are in main thread, this is set to `0` + return workerThreads.threadId + } catch (e) { + // worker_threads are not available, fallback to 0 + return 0 + } +})() + +var invocations = 0 +function getTmpname (filename) { + return filename + '.' + + MurmurHash3(__filename) + .hash(String(process.pid)) + .hash(String(threadId)) + .hash(String(++invocations)) + .result() +} + +function cleanupOnExit (tmpfile) { + return function () { + try { + fs.unlinkSync(typeof tmpfile === 'function' ? tmpfile() : tmpfile) + } catch (_) {} + } +} + +function writeFile (filename, data, options, callback) { + if (options) { + if (options instanceof Function) { + callback = options + options = {} + } else if (typeof options === 'string') { + options = { encoding: options } + } + } else { + options = {} + } + + var Promise = options.Promise || global.Promise + var truename + var fd + var tmpfile + /* istanbul ignore next -- The closure only gets called when onExit triggers */ + var removeOnExitHandler = onExit(cleanupOnExit(() => tmpfile)) + var absoluteName = path.resolve(filename) + + new Promise(function serializeSameFile (resolve) { + // make a queue if it doesn't already exist + if (!activeFiles[absoluteName]) activeFiles[absoluteName] = [] + + activeFiles[absoluteName].push(resolve) // add this job to the queue + if (activeFiles[absoluteName].length === 1) resolve() // kick off the first one + }).then(function getRealPath () { + return new Promise(function (resolve) { + fs.realpath(filename, function (_, realname) { + truename = realname || filename + tmpfile = getTmpname(truename) + resolve() + }) + }) + }).then(function stat () { + return new Promise(function stat (resolve) { + if (options.mode && options.chown) resolve() + else { + // Either mode or chown is not explicitly set + // Default behavior is to copy it from original file + fs.stat(truename, function (err, stats) { + if (err || !stats) resolve() + else { + options = Object.assign({}, options) + + if (options.mode == null) { + options.mode = stats.mode + } + if (options.chown == null && process.getuid) { + options.chown = { uid: stats.uid, gid: stats.gid } + } + resolve() + } + }) + } + }) + }).then(function thenWriteFile () { + return new Promise(function (resolve, reject) { + fs.open(tmpfile, 'w', options.mode, function (err, _fd) { + fd = _fd + if (err) reject(err) + else resolve() + }) + }) + }).then(function write () { + return new Promise(function (resolve, reject) { + if (Buffer.isBuffer(data)) { + fs.write(fd, data, 0, data.length, 0, function (err) { + if (err) reject(err) + else resolve() + }) + } else if (data != null) { + fs.write(fd, String(data), 0, String(options.encoding || 
'utf8'), function (err) { + if (err) reject(err) + else resolve() + }) + } else resolve() + }) + }).then(function syncAndClose () { + return new Promise(function (resolve, reject) { + if (options.fsync !== false) { + fs.fsync(fd, function (err) { + if (err) fs.close(fd, () => reject(err)) + else fs.close(fd, resolve) + }) + } else { + fs.close(fd, resolve) + } + }) + }).then(function chown () { + fd = null + if (options.chown) { + return new Promise(function (resolve, reject) { + fs.chown(tmpfile, options.chown.uid, options.chown.gid, function (err) { + if (err) reject(err) + else resolve() + }) + }) + } + }).then(function chmod () { + if (options.mode) { + return new Promise(function (resolve, reject) { + fs.chmod(tmpfile, options.mode, function (err) { + if (err) reject(err) + else resolve() + }) + }) + } + }).then(function rename () { + return new Promise(function (resolve, reject) { + fs.rename(tmpfile, truename, function (err) { + if (err) reject(err) + else resolve() + }) + }) + }).then(function success () { + removeOnExitHandler() + callback() + }, function fail (err) { + return new Promise(resolve => { + return fd ? fs.close(fd, resolve) : resolve() + }).then(() => { + removeOnExitHandler() + fs.unlink(tmpfile, function () { + callback(err) + }) + }) + }).then(function checkQueue () { + activeFiles[absoluteName].shift() // remove the element added by serializeSameFile + if (activeFiles[absoluteName].length > 0) { + activeFiles[absoluteName][0]() // start next job if one is pending + } else delete activeFiles[absoluteName] + }) +} + +function writeFileSync (filename, data, options) { + if (typeof options === 'string') options = { encoding: options } + else if (!options) options = {} + try { + filename = fs.realpathSync(filename) + } catch (ex) { + // it's ok, it'll happen on a not yet existing file + } + var tmpfile = getTmpname(filename) + + if (!options.mode || !options.chown) { + // Either mode or chown is not explicitly set + // Default behavior is to copy it from original file + try { + var stats = fs.statSync(filename) + options = Object.assign({}, options) + if (!options.mode) { + options.mode = stats.mode + } + if (!options.chown && process.getuid) { + options.chown = { uid: stats.uid, gid: stats.gid } + } + } catch (ex) { + // ignore stat errors + } + } + + var fd + var cleanup = cleanupOnExit(tmpfile) + var removeOnExitHandler = onExit(cleanup) + + try { + fd = fs.openSync(tmpfile, 'w', options.mode) + if (Buffer.isBuffer(data)) { + fs.writeSync(fd, data, 0, data.length, 0) + } else if (data != null) { + fs.writeSync(fd, String(data), 0, String(options.encoding || 'utf8')) + } + if (options.fsync !== false) { + fs.fsyncSync(fd) + } + fs.closeSync(fd) + if (options.chown) fs.chownSync(tmpfile, options.chown.uid, options.chown.gid) + if (options.mode) fs.chmodSync(tmpfile, options.mode) + fs.renameSync(tmpfile, filename) + removeOnExitHandler() + } catch (err) { + if (fd) { + try { + fs.closeSync(fd) + } catch (ex) { + // ignore close errors at this stage, error may have closed fd already. 
+ } + } + removeOnExitHandler() + cleanup() + throw err + } +} diff --git a/node_modules/write-file-atomic/package.json b/node_modules/write-file-atomic/package.json new file mode 100644 index 00000000..bbb0fa23 --- /dev/null +++ b/node_modules/write-file-atomic/package.json @@ -0,0 +1,41 @@ +{ + "name": "write-file-atomic", + "version": "2.4.3", + "description": "Write files in an atomic fashion w/configurable ownership", + "main": "index.js", + "scripts": { + "test": "standard && tap --100 test/*.js", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags" + }, + "repository": { + "type": "git", + "url": "git@github.com:iarna/write-file-atomic.git" + }, + "keywords": [ + "writeFile", + "atomic" + ], + "author": "Rebecca Turner (http://re-becca.org)", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/write-file-atomic/issues" + }, + "homepage": "https://github.com/iarna/write-file-atomic", + "dependencies": { + "graceful-fs": "^4.1.11", + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.2" + }, + "devDependencies": { + "mkdirp": "^0.5.1", + "require-inject": "^1.4.0", + "rimraf": "^2.5.4", + "standard": "^12.0.1", + "tap": "^12.1.3" + }, + "files": [ + "index.js" + ] +} diff --git a/node_modules/xdg-basedir/index.js b/node_modules/xdg-basedir/index.js new file mode 100644 index 00000000..f5aa1769 --- /dev/null +++ b/node_modules/xdg-basedir/index.js @@ -0,0 +1,28 @@ +'use strict'; +const os = require('os'); +const path = require('path'); + +const home = os.homedir(); +const env = process.env; + +exports.data = env.XDG_DATA_HOME || + (home ? path.join(home, '.local', 'share') : null); + +exports.config = env.XDG_CONFIG_HOME || + (home ? path.join(home, '.config') : null); + +exports.cache = env.XDG_CACHE_HOME || (home ? path.join(home, '.cache') : null); + +exports.runtime = env.XDG_RUNTIME_DIR || null; + +exports.dataDirs = (env.XDG_DATA_DIRS || '/usr/local/share/:/usr/share/').split(':'); + +if (exports.data) { + exports.dataDirs.unshift(exports.data); +} + +exports.configDirs = (env.XDG_CONFIG_DIRS || '/etc/xdg').split(':'); + +if (exports.config) { + exports.configDirs.unshift(exports.config); +} diff --git a/node_modules/xdg-basedir/license b/node_modules/xdg-basedir/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/xdg-basedir/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
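The `xdg-basedir` entry points added above (`index.js`) resolve to `null` when neither the XDG environment variables nor a home directory are available, so callers need a fallback. A minimal consumer-side sketch, not part of this patch and using a hypothetical `my-app` cache name, might look like:

```js
const os = require('os');
const path = require('path');
const xdgBasedir = require('xdg-basedir');

// xdgBasedir.cache is null when $XDG_CACHE_HOME is unset and no home
// directory can be found; fall back to a temp directory in that case.
const cacheDir = xdgBasedir.cache || path.join(os.tmpdir(), 'my-app');

console.log('cache files will be written under', cacheDir);
```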
diff --git a/node_modules/xdg-basedir/package.json b/node_modules/xdg-basedir/package.json new file mode 100644 index 00000000..08bfa8ca --- /dev/null +++ b/node_modules/xdg-basedir/package.json @@ -0,0 +1,40 @@ +{ + "name": "xdg-basedir", + "version": "3.0.0", + "description": "Get XDG Base Directory paths", + "license": "MIT", + "repository": "sindresorhus/xdg-basedir", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "xdg", + "base", + "directory", + "dir", + "basedir", + "path", + "data", + "config", + "cache", + "linux", + "unix", + "spec" + ], + "devDependencies": { + "ava": "*", + "require-uncached": "^1.0.2", + "xo": "*" + } +} diff --git a/node_modules/xdg-basedir/readme.md b/node_modules/xdg-basedir/readme.md new file mode 100644 index 00000000..61f8c16b --- /dev/null +++ b/node_modules/xdg-basedir/readme.md @@ -0,0 +1,60 @@ +# xdg-basedir [![Build Status](https://travis-ci.org/sindresorhus/xdg-basedir.svg?branch=master)](https://travis-ci.org/sindresorhus/xdg-basedir) + +> Get [XDG Base Directory](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) paths + + +## Install + +``` +$ npm install --save xdg-basedir +``` + + +## Usage + +```js +const xdgBasedir = require('xdg-basedir'); + +xdgBasedir.data; +//=> '/home/sindresorhus/.local/share' + +xdgBasedir.config; +//=> '/home/sindresorhus/.config' + +xdgBasedir.dataDirs +//=> ['/home/sindresorhus/.local/share', '/usr/local/share/', '/usr/share/'] +``` + + +## API + +The properties `.data`, `.config`, `.cache`, `.runtime` will return `null` in the uncommon case that both the XDG environment variable is not set and the users home directory can't be found. You need to handle this case. A common solution is to [fall back to a temp directory](https://github.com/yeoman/configstore/blob/b82690fc401318ad18dcd7d151a0003a4898a314/index.js#L15). + +### .data + +Directory for user specific data files. + +### .config + +Directory for user specific configuration files. + +### .cache + +Directory for user specific non-essential data files. + +### .runtime + +Directory for user-specific non-essential runtime files and other file objects (such as sockets, named pipes, etc). + +### .dataDirs + +Preference-ordered array of base directories to search for data files in addition to `.data`. + +### .configDirs + +Preference-ordered array of base directories to search for configuration files in addition to `.config`. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/yallist/LICENSE b/node_modules/yallist/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/yallist/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/yallist/README.md b/node_modules/yallist/README.md new file mode 100644 index 00000000..f5861018 --- /dev/null +++ b/node_modules/yallist/README.md @@ -0,0 +1,204 @@ +# yallist + +Yet Another Linked List + +There are many doubly-linked list implementations like it, but this +one is mine. + +For when an array would be too big, and a Map can't be iterated in +reverse order. + + +[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist) + +## basic usage + +```javascript +var yallist = require('yallist') +var myList = yallist.create([1, 2, 3]) +myList.push('foo') +myList.unshift('bar') +// of course pop() and shift() are there, too +console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo'] +myList.forEach(function (k) { + // walk the list head to tail +}) +myList.forEachReverse(function (k, index, list) { + // walk the list tail to head +}) +var myDoubledList = myList.map(function (k) { + return k + k +}) +// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo'] +// mapReverse is also a thing +var myDoubledListReverse = myList.mapReverse(function (k) { + return k + k +}) // ['foofoo', 6, 4, 2, 'barbar'] + +var reduced = myList.reduce(function (set, entry) { + set += entry + return set +}, 'start') +console.log(reduced) // 'startfoo123bar' +``` + +## api + +The whole API is considered "public". + +Functions with the same name as an Array method work more or less the +same way. + +There's reverse versions of most things because that's the point. + +### Yallist + +Default export, the class that holds and manages a list. + +Call it with either a forEach-able (like an array) or a set of +arguments, to initialize the list. + +The Array-ish methods all act like you'd expect. No magic length, +though, so if you change that it won't automatically prune or add +empty spots. + +### Yallist.create(..) + +Alias for Yallist function. Some people like factories. + +#### yallist.head + +The first node in the list + +#### yallist.tail + +The last node in the list + +#### yallist.length + +The number of nodes in the list. (Change this at your peril. It is +not magic like Array length.) + +#### yallist.toArray() + +Convert the list to an array. + +#### yallist.forEach(fn, [thisp]) + +Call a function on each item in the list. + +#### yallist.forEachReverse(fn, [thisp]) + +Call a function on each item in the list, in reverse order. + +#### yallist.get(n) + +Get the data at position `n` in the list. If you use this a lot, +probably better off just using an Array. + +#### yallist.getReverse(n) + +Get the data at position `n`, counting from the tail. + +#### yallist.map(fn, thisp) + +Create a new Yallist with the result of calling the function on each +item. + +#### yallist.mapReverse(fn, thisp) + +Same as `map`, but in reverse. + +#### yallist.pop() + +Get the data from the list tail, and remove the tail from the list. + +#### yallist.push(item, ...) + +Insert one or more items to the tail of the list. + +#### yallist.reduce(fn, initialValue) + +Like Array.reduce. 
+ +#### yallist.reduceReverse + +Like Array.reduce, but in reverse. + +#### yallist.reverse + +Reverse the list in place. + +#### yallist.shift() + +Get the data from the list head, and remove the head from the list. + +#### yallist.slice([from], [to]) + +Just like Array.slice, but returns a new Yallist. + +#### yallist.sliceReverse([from], [to]) + +Just like yallist.slice, but the result is returned in reverse. + +#### yallist.toArray() + +Create an array representation of the list. + +#### yallist.toArrayReverse() + +Create a reversed array representation of the list. + +#### yallist.unshift(item, ...) + +Insert one or more items to the head of the list. + +#### yallist.unshiftNode(node) + +Move a Node object to the front of the list. (That is, pull it out of +wherever it lives, and make it the new head.) + +If the node belongs to a different list, then that list will remove it +first. + +#### yallist.pushNode(node) + +Move a Node object to the end of the list. (That is, pull it out of +wherever it lives, and make it the new tail.) + +If the node belongs to a list already, then that list will remove it +first. + +#### yallist.removeNode(node) + +Remove a node from the list, preserving referential integrity of head +and tail and other nodes. + +Will throw an error if you try to have a list remove a node that +doesn't belong to it. + +### Yallist.Node + +The class that holds the data and is actually the list. + +Call with `var n = new Node(value, previousNode, nextNode)` + +Note that if you do direct operations on Nodes themselves, it's very +easy to get into weird states where the list is broken. Be careful :) + +#### node.next + +The next node in the list. + +#### node.prev + +The previous node in the list. + +#### node.value + +The data the node contains. + +#### node.list + +The list to which this node belongs. (Null if it does not belong to +any list.) diff --git a/node_modules/yallist/iterator.js b/node_modules/yallist/iterator.js new file mode 100644 index 00000000..d41c97a1 --- /dev/null +++ b/node_modules/yallist/iterator.js @@ -0,0 +1,8 @@ +'use strict' +module.exports = function (Yallist) { + Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } + } +} diff --git a/node_modules/yallist/package.json b/node_modules/yallist/package.json new file mode 100644 index 00000000..c5445938 --- /dev/null +++ b/node_modules/yallist/package.json @@ -0,0 +1,29 @@ +{ + "name": "yallist", + "version": "3.0.3", + "description": "Yet Another Linked List", + "main": "yallist.js", + "directories": { + "test": "test" + }, + "files": [ + "yallist.js", + "iterator.js" + ], + "dependencies": {}, + "devDependencies": { + "tap": "^12.1.0" + }, + "scripts": { + "test": "tap test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/yallist.git" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/yallist/yallist.js b/node_modules/yallist/yallist.js new file mode 100644 index 00000000..b0ab36cf --- /dev/null +++ b/node_modules/yallist/yallist.js @@ -0,0 +1,376 @@ +'use strict' +module.exports = Yallist + +Yallist.Node = Node +Yallist.create = Yallist + +function Yallist (list) { + var self = this + if (!(self instanceof Yallist)) { + self = new Yallist() + } + + self.tail = null + self.head = null + self.length = 0 + + if (list && typeof list.forEach === 'function') { + list.forEach(function (item) { + self.push(item) + }) + } else if (arguments.length > 0) { + for (var i = 0, l = arguments.length; i < l; i++) { + self.push(arguments[i]) + } + } + + return self +} + +Yallist.prototype.removeNode = function (node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list') + } + + var next = node.next + var prev = node.prev + + if (next) { + next.prev = prev + } + + if (prev) { + prev.next = next + } + + if (node === this.head) { + this.head = next + } + if (node === this.tail) { + this.tail = prev + } + + node.list.length-- + node.next = null + node.prev = null + node.list = null +} + +Yallist.prototype.unshiftNode = function (node) { + if (node === this.head) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var head = this.head + node.list = this + node.next = head + if (head) { + head.prev = node + } + + this.head = node + if (!this.tail) { + this.tail = node + } + this.length++ +} + +Yallist.prototype.pushNode = function (node) { + if (node === this.tail) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var tail = this.tail + node.list = this + node.prev = tail + if (tail) { + tail.next = node + } + + this.tail = node + if (!this.head) { + this.head = node + } + this.length++ +} + +Yallist.prototype.push = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + push(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.unshift = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + unshift(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.pop = function () { + if (!this.tail) { + return undefined + } + + var res = this.tail.value + this.tail = this.tail.prev + if (this.tail) { + this.tail.next = null + } else { + this.head = null + } + this.length-- + return res +} + +Yallist.prototype.shift = function () { + if (!this.head) { + return undefined + } + + var res = this.head.value + this.head = this.head.next + if (this.head) { + this.head.prev = null + } else { + this.tail = null + } + this.length-- + return res +} + +Yallist.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.head, i = 0; walker !== null; i++) { + fn.call(thisp, walker.value, i, this) + walker = walker.next + } +} + +Yallist.prototype.forEachReverse = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { + fn.call(thisp, walker.value, i, this) + walker = walker.prev + } +} + +Yallist.prototype.get = function (n) { + for (var i = 0, walker = this.head; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.next + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.getReverse = function (n) { + for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { + // abort out of the list early if 
we hit a cycle + walker = walker.prev + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.map = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.head; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.next + } + return res +} + +Yallist.prototype.mapReverse = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.tail; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.prev + } + return res +} + +Yallist.prototype.reduce = function (fn, initial) { + var acc + var walker = this.head + if (arguments.length > 1) { + acc = initial + } else if (this.head) { + walker = this.head.next + acc = this.head.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = 0; walker !== null; i++) { + acc = fn(acc, walker.value, i) + walker = walker.next + } + + return acc +} + +Yallist.prototype.reduceReverse = function (fn, initial) { + var acc + var walker = this.tail + if (arguments.length > 1) { + acc = initial + } else if (this.tail) { + walker = this.tail.prev + acc = this.tail.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = this.length - 1; walker !== null; i--) { + acc = fn(acc, walker.value, i) + walker = walker.prev + } + + return acc +} + +Yallist.prototype.toArray = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.head; walker !== null; i++) { + arr[i] = walker.value + walker = walker.next + } + return arr +} + +Yallist.prototype.toArrayReverse = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.tail; walker !== null; i++) { + arr[i] = walker.value + walker = walker.prev + } + return arr +} + +Yallist.prototype.slice = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = 0, walker = this.head; walker !== null && i < from; i++) { + walker = walker.next + } + for (; walker !== null && i < to; i++, walker = walker.next) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.sliceReverse = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { + walker = walker.prev + } + for (; walker !== null && i > from; i--, walker = walker.prev) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.reverse = function () { + var head = this.head + var tail = this.tail + for (var walker = head; walker !== null; walker = walker.prev) { + var p = walker.prev + walker.prev = walker.next + walker.next = p + } + this.head = tail + this.tail = head + return this +} + +function push (self, item) { + self.tail = new Node(item, self.tail, null, self) + if (!self.head) { + self.head = self.tail + } + self.length++ +} + +function unshift (self, item) { + self.head = new Node(item, null, self.head, self) + if (!self.tail) { + self.tail = self.head + } + 
self.length++ +} + +function Node (value, prev, next, list) { + if (!(this instanceof Node)) { + return new Node(value, prev, next, list) + } + + this.list = list + this.value = value + + if (prev) { + prev.next = this + this.prev = prev + } else { + this.prev = null + } + + if (next) { + next.prev = this + this.next = next + } else { + this.next = null + } +} + +try { + // add if support for Symbol.iterator is present + require('./iterator.js')(Yallist) +} catch (er) {} diff --git a/nodemon.json b/nodemon.json new file mode 100644 index 00000000..d1ba373d --- /dev/null +++ b/nodemon.json @@ -0,0 +1,5 @@ +{ + "execMap": { + "js": "sucrase-node" + } +} diff --git a/package.json b/package.json new file mode 100644 index 00000000..7ccba25f --- /dev/null +++ b/package.json @@ -0,0 +1,16 @@ +{ + "name": "modulo02", + "version": "1.0.0", + "main": "index.js", + "license": "MIT", + "scripts": { + "dev": "nodemon src/server.js" + }, + "dependencies": { + "express": "^4.17.1" + }, + "devDependencies": { + "nodemon": "^1.19.1", + "sucrase": "^3.10.1" + } +} diff --git a/src/app.js b/src/app.js new file mode 100644 index 00000000..75df33dc --- /dev/null +++ b/src/app.js @@ -0,0 +1,19 @@ +import express from "express"; +import routes from "./routes"; +class App { + constructor() { + this.server = express(); + this.middlewares(); + this.routes(); + } + + middlewares() { + this.server.use(express.json()); + } + + routes() { + this.server.use(routes); + } +} + +export default new App().server; diff --git a/src/routes.js b/src/routes.js new file mode 100644 index 00000000..21da88e3 --- /dev/null +++ b/src/routes.js @@ -0,0 +1,9 @@ +import { Router } from "express"; + +const routes = new Router(); + +routes.get("/", (req, res) => { + return res.json({ message: "Hiuiii" }); +}); + +export default routes; diff --git a/src/server.js b/src/server.js new file mode 100644 index 00000000..f9784eae --- /dev/null +++ b/src/server.js @@ -0,0 +1,3 @@ +import app from "./app"; + +app.listen(3333); diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 00000000..b35f6367 --- /dev/null +++ b/yarn.lock @@ -0,0 +1,2073 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +abbrev@1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== + +accepts@~1.3.7: + version "1.3.7" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" + integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== + dependencies: + mime-types "~2.1.24" + negotiator "0.6.2" + +ansi-align@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-2.0.0.tgz#c36aeccba563b89ceb556f3690f0b1d9e3547f7f" + integrity sha1-w2rsy6VjuJzrVW82kPCx2eNUf38= + dependencies: + string-width "^2.0.0" + +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= + +ansi-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +any-promise@^1.0.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha1-q8av7tzqUugJzcA3au0845Y10X8= + +anymatch@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" + integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== + dependencies: + micromatch "^3.1.4" + normalize-path "^2.1.1" + +aproba@^1.0.3: + version "1.2.0" + resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" + integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== + +are-we-there-yet@~1.1.2: + version "1.1.5" + resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" + integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== + dependencies: + delegates "^1.0.0" + readable-stream "^2.0.6" + +arr-diff@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" + integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= + +arr-flatten@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" + integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== + +arr-union@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" + integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= + +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= + 
+array-unique@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" + integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= + +assign-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" + integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= + +async-each@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" + integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== + +atob@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" + integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== + +balanced-match@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= + +base@^0.11.1: + version "0.11.2" + resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" + integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== + dependencies: + cache-base "^1.0.1" + class-utils "^0.3.5" + component-emitter "^1.2.1" + define-property "^1.0.0" + isobject "^3.0.1" + mixin-deep "^1.2.0" + pascalcase "^0.1.1" + +binary-extensions@^1.0.0: + version "1.13.1" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" + integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== + +body-parser@1.19.0: + version "1.19.0" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" + integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== + dependencies: + bytes "3.1.0" + content-type "~1.0.4" + debug "2.6.9" + depd "~1.1.2" + http-errors "1.7.2" + iconv-lite "0.4.24" + on-finished "~2.3.0" + qs "6.7.0" + raw-body "2.4.0" + type-is "~1.6.17" + +boxen@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/boxen/-/boxen-1.3.0.tgz#55c6c39a8ba58d9c61ad22cd877532deb665a20b" + integrity sha512-TNPjfTr432qx7yOjQyaXm3dSR0MH9vXp7eT1BFSl/C51g+EFnOR9hTg1IreahGBmDNCehscshe45f+C1TBZbLw== + dependencies: + ansi-align "^2.0.0" + camelcase "^4.0.0" + chalk "^2.0.1" + cli-boxes "^1.0.0" + string-width "^2.0.0" + term-size "^1.2.0" + widest-line "^2.0.0" + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^2.3.1, braces@^2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" + integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== + dependencies: + arr-flatten "^1.1.0" + array-unique "^0.3.2" + extend-shallow "^2.0.1" + fill-range "^4.0.0" + isobject "^3.0.1" + repeat-element "^1.1.2" + snapdragon "^0.8.1" + 
snapdragon-node "^2.0.1" + split-string "^3.0.2" + to-regex "^3.0.1" + +bytes@3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" + integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== + +cache-base@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" + integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== + dependencies: + collection-visit "^1.0.0" + component-emitter "^1.2.1" + get-value "^2.0.6" + has-value "^1.0.0" + isobject "^3.0.1" + set-value "^2.0.0" + to-object-path "^0.3.0" + union-value "^1.0.0" + unset-value "^1.0.0" + +camelcase@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd" + integrity sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0= + +capture-stack-trace@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz#a6c0bbe1f38f3aa0b92238ecb6ff42c344d4135d" + integrity sha512-mYQLZnx5Qt1JgB1WEiMCf2647plpGeQ2NMR/5L0HNZzGQo4fuSPnK+wjfPnKZV0aiJDgzmWqqkV/g7JD+DW0qw== + +chalk@^2.0.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chokidar@^2.1.5: + version "2.1.6" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.6.tgz#b6cad653a929e244ce8a834244164d241fa954c5" + integrity sha512-V2jUo67OKkc6ySiRpJrjlpJKl9kDuG+Xb8VgsGzb+aEouhgS1D0weyPU4lEzdAcsCAvrih2J2BqyXqHWvVLw5g== + dependencies: + anymatch "^2.0.0" + async-each "^1.0.1" + braces "^2.3.2" + glob-parent "^3.1.0" + inherits "^2.0.3" + is-binary-path "^1.0.0" + is-glob "^4.0.0" + normalize-path "^3.0.0" + path-is-absolute "^1.0.0" + readdirp "^2.2.1" + upath "^1.1.1" + optionalDependencies: + fsevents "^1.2.7" + +chownr@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.2.tgz#a18f1e0b269c8a6a5d3c86eb298beb14c3dd7bf6" + integrity sha512-GkfeAQh+QNy3wquu9oIZr6SS5x7wGdSgNQvD10X3r+AZr1Oys22HW8kAmDMvNg2+Dm0TeGaEuO8gFwdBXxwO8A== + +ci-info@^1.5.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.6.0.tgz#2ca20dbb9ceb32d4524a683303313f0304b1e497" + integrity sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A== + +class-utils@^0.3.5: + version "0.3.6" + resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" + integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== + dependencies: + arr-union "^3.1.0" + define-property "^0.2.5" + isobject "^3.0.0" + static-extend "^0.1.1" + +cli-boxes@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-1.0.0.tgz#4fa917c3e59c94a004cd61f8ee509da651687143" + integrity sha1-T6kXw+WclKAEzWH47lCdplFocUM= + +code-point-at@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= + +collection-visit@^1.0.0: + version "1.0.0" + 
resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" + integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= + dependencies: + map-visit "^1.0.0" + object-visit "^1.0.0" + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + +commander@^2.19.0: + version "2.20.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.0.tgz#d58bb2b5c1ee8f87b0d340027e9e94e222c5a422" + integrity sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ== + +component-emitter@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" + integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +configstore@^3.0.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/configstore/-/configstore-3.1.2.tgz#c6f25defaeef26df12dd33414b001fe81a543f8f" + integrity sha512-vtv5HtGjcYUgFrXc6Kx747B83MRRVS5R1VTEQoXvuP+kMI+if6uywV0nDGoiydJRy4yk7h9od5Og0kxx4zUXmw== + dependencies: + dot-prop "^4.1.0" + graceful-fs "^4.1.2" + make-dir "^1.0.0" + unique-string "^1.0.0" + write-file-atomic "^2.0.0" + xdg-basedir "^3.0.0" + +console-control-strings@^1.0.0, console-control-strings@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" + integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= + +content-disposition@0.5.3: + version "0.5.3" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" + integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== + dependencies: + safe-buffer "5.1.2" + +content-type@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= + +cookie@0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" + integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== + +copy-descriptor@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" + integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= + +core-util-is@~1.0.0: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= + +create-error-class@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/create-error-class/-/create-error-class-3.0.2.tgz#06be7abef947a3f14a30fd610671d401bca8b7b6" + integrity sha1-Br56vvlHo/FKMP1hBnHUAbyot7Y= + dependencies: + capture-stack-trace "^1.0.0" + +cross-spawn@^5.0.1: + version "5.1.0" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" + integrity sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk= + dependencies: + lru-cache "^4.0.1" + shebang-command "^1.2.0" + which "^1.2.9" + +crypto-random-string@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e" + integrity sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4= + +debug@2.6.9, debug@^2.2.0, debug@^2.3.3: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^3.1.0, debug@^3.2.6: + version "3.2.6" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" + integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== + dependencies: + ms "^2.1.1" + +decode-uri-component@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" + integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= + +deep-extend@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" + integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== + +define-property@^0.2.5: + version "0.2.5" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" + integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= + dependencies: + is-descriptor "^0.1.0" + +define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" + integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= + dependencies: + is-descriptor "^1.0.0" + +define-property@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" + integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== + dependencies: + is-descriptor "^1.0.2" + isobject "^3.0.1" + +delegates@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" + integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= + +depd@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= + +destroy@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" + integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= + +detect-libc@^1.0.2: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" + integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= + +dot-prop@^4.1.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-4.2.0.tgz#1f19e0c2e1aa0e32797c49799f2837ac6af69c57" + integrity sha512-tUMXrxlExSW6U2EXiiKGSBVdYgtV8qlHL+C10TsW4PURY/ic+eaysnSkwB4kA/mBlCyy/IKDJ+Lc3wbWeaXtuQ== + dependencies: + is-obj "^1.0.0" + +duplexer3@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" + integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= + +execa@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" + integrity sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c= + dependencies: + cross-spawn "^5.0.1" + get-stream "^3.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + +expand-brackets@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" + integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= + dependencies: + debug "^2.3.3" + define-property "^0.2.5" + extend-shallow "^2.0.1" + posix-character-classes "^0.1.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +express@^4.17.1: + version "4.17.1" + resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" + integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== + dependencies: + accepts "~1.3.7" + array-flatten "1.1.1" + body-parser "1.19.0" + content-disposition "0.5.3" + content-type "~1.0.4" + cookie "0.4.0" + cookie-signature "1.0.6" + debug "2.6.9" + depd "~1.1.2" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "~1.1.2" + fresh "0.5.2" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "~2.3.0" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.5" + qs "6.7.0" + range-parser "~1.2.1" + safe-buffer "5.1.2" + send "0.17.1" + serve-static "1.14.1" + setprototypeof "1.1.1" + statuses "~1.5.0" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +extend-shallow@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= + 
dependencies: + is-extendable "^0.1.0" + +extend-shallow@^3.0.0, extend-shallow@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" + integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= + dependencies: + assign-symbols "^1.0.0" + is-extendable "^1.0.1" + +extglob@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" + integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== + dependencies: + array-unique "^0.3.2" + define-property "^1.0.0" + expand-brackets "^2.1.4" + extend-shallow "^2.0.1" + fragment-cache "^0.2.1" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +fill-range@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" + integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= + dependencies: + extend-shallow "^2.0.1" + is-number "^3.0.0" + repeat-string "^1.6.1" + to-regex-range "^2.1.0" + +finalhandler@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" + integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== + dependencies: + debug "2.6.9" + encodeurl "~1.0.2" + escape-html "~1.0.3" + on-finished "~2.3.0" + parseurl "~1.3.3" + statuses "~1.5.0" + unpipe "~1.0.0" + +for-in@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" + integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= + +forwarded@~0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" + integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= + +fragment-cache@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" + integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= + dependencies: + map-cache "^0.2.2" + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= + +fs-minipass@^1.2.5: + version "1.2.6" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.6.tgz#2c5cc30ded81282bfe8a0d7c7c1853ddeb102c07" + integrity sha512-crhvyXcMejjv3Z5d2Fa9sf5xLYVCF5O1c71QxbVnbLsmYMBEvDAftewesN/HhY03YRoA7zOMxjNGrF5svGaaeQ== + dependencies: + minipass "^2.2.1" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +fsevents@^1.2.7: + version "1.2.9" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" + integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== + dependencies: + nan "^2.12.1" + node-pre-gyp "^0.12.0" + +gauge@~2.7.3: + version "2.7.4" + resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" + integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= + dependencies: + aproba "^1.0.3" + console-control-strings "^1.0.0" + has-unicode "^2.0.0" + object-assign "^4.1.0" + signal-exit "^3.0.0" + string-width 
"^1.0.1" + strip-ansi "^3.0.1" + wide-align "^1.1.0" + +get-stream@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" + integrity sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ= + +get-value@^2.0.3, get-value@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" + integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= + +glob-parent@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" + integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= + dependencies: + is-glob "^3.1.0" + path-dirname "^1.0.0" + +glob@^7.1.3: + version "7.1.4" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255" + integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +global-dirs@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-0.1.1.tgz#b319c0dd4607f353f3be9cca4c72fc148c49f445" + integrity sha1-sxnA3UYH81PzvpzKTHL8FIxJ9EU= + dependencies: + ini "^1.3.4" + +got@^6.7.1: + version "6.7.1" + resolved "https://registry.yarnpkg.com/got/-/got-6.7.1.tgz#240cd05785a9a18e561dc1b44b41c763ef1e8db0" + integrity sha1-JAzQV4WpoY5WHcG0S0HHY+8ejbA= + dependencies: + create-error-class "^3.0.0" + duplexer3 "^0.1.4" + get-stream "^3.0.0" + is-redirect "^1.0.0" + is-retry-allowed "^1.0.0" + is-stream "^1.0.0" + lowercase-keys "^1.0.0" + safe-buffer "^5.0.1" + timed-out "^4.0.0" + unzip-response "^2.0.1" + url-parse-lax "^1.0.0" + +graceful-fs@^4.1.11, graceful-fs@^4.1.2: + version "4.2.0" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.0.tgz#8d8fdc73977cb04104721cb53666c1ca64cd328b" + integrity sha512-jpSvDPV4Cq/bgtpndIWbI5hmYxhQGHPC4d4cqBPb4DLniCfhJokdXhwhaDuLBGLQdvvRum/UiX6ECVIPvDXqdg== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + +has-unicode@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" + integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= + +has-value@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" + integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= + dependencies: + get-value "^2.0.3" + has-values "^0.1.4" + isobject "^2.0.0" + +has-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" + integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= + dependencies: + get-value "^2.0.6" + has-values "^1.0.0" + isobject "^3.0.0" + +has-values@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" + integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= + +has-values@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" + integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" + 
+http-errors@1.7.2: + version "1.7.2" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" + integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.1" + statuses ">= 1.5.0 < 2" + toidentifier "1.0.0" + +http-errors@~1.7.2: + version "1.7.3" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" + integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== + dependencies: + depd "~1.1.2" + inherits "2.0.4" + setprototypeof "1.1.1" + statuses ">= 1.5.0 < 2" + toidentifier "1.0.0" + +iconv-lite@0.4.24, iconv-lite@^0.4.4: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +ignore-by-default@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/ignore-by-default/-/ignore-by-default-1.0.1.tgz#48ca6d72f6c6a3af00a9ad4ae6876be3889e2b09" + integrity sha1-SMptcvbGo68Aqa1K5odr44ieKwk= + +ignore-walk@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.1.tgz#a83e62e7d272ac0e3b551aaa82831a19b69f82f8" + integrity sha512-DTVlMx3IYPe0/JJcYP7Gxg7ttZZu3IInhuEhbchuqneY9wWe5Ojy2mXLBaQFUQmo0AW2r3qG7m1mg86js+gnlQ== + dependencies: + minimatch "^3.0.4" + +import-lazy@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43" + integrity sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM= + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= + +ini@^1.3.4, ini@~1.3.0: + version "1.3.5" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" + integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== + +ipaddr.js@1.9.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.0.tgz#37df74e430a0e47550fe54a2defe30d8acd95f65" + integrity sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA== + +is-accessor-descriptor@^0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" + integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= 
+ dependencies: + kind-of "^3.0.2" + +is-accessor-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" + integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== + dependencies: + kind-of "^6.0.0" + +is-binary-path@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" + integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= + dependencies: + binary-extensions "^1.0.0" + +is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== + +is-ci@^1.0.10: + version "1.2.1" + resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-1.2.1.tgz#e3779c8ee17fccf428488f6e281187f2e632841c" + integrity sha512-s6tfsaQaQi3JNciBH6shVqEDvhGut0SUXr31ag8Pd8BBbVVlcGfWhpPmEOoM6RJ5TFhbypvf5yyRw/VXW1IiWg== + dependencies: + ci-info "^1.5.0" + +is-data-descriptor@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" + integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= + dependencies: + kind-of "^3.0.2" + +is-data-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" + integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== + dependencies: + kind-of "^6.0.0" + +is-descriptor@^0.1.0: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" + integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== + dependencies: + is-accessor-descriptor "^0.1.6" + is-data-descriptor "^0.1.4" + kind-of "^5.0.0" + +is-descriptor@^1.0.0, is-descriptor@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" + integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== + dependencies: + is-accessor-descriptor "^1.0.0" + is-data-descriptor "^1.0.0" + kind-of "^6.0.2" + +is-extendable@^0.1.0, is-extendable@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= + +is-extendable@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" + integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== + dependencies: + is-plain-object "^2.0.4" + +is-extglob@^2.1.0, is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-fullwidth-code-point@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= + 
dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= + +is-glob@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" + integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= + dependencies: + is-extglob "^2.1.0" + +is-glob@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" + integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== + dependencies: + is-extglob "^2.1.1" + +is-installed-globally@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.1.0.tgz#0dfd98f5a9111716dd535dda6492f67bf3d25a80" + integrity sha1-Df2Y9akRFxbdU13aZJL2e/PSWoA= + dependencies: + global-dirs "^0.1.0" + is-path-inside "^1.0.0" + +is-npm@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-1.0.0.tgz#f2fb63a65e4905b406c86072765a1a4dc793b9f4" + integrity sha1-8vtjpl5JBbQGyGBydloaTceTufQ= + +is-number@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" + integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= + dependencies: + kind-of "^3.0.2" + +is-obj@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" + integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= + +is-path-inside@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.1.tgz#8ef5b7de50437a3fdca6b4e865ef7aa55cb48036" + integrity sha1-jvW33lBDej/cprToZe96pVy0gDY= + dependencies: + path-is-inside "^1.0.1" + +is-plain-object@^2.0.3, is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-redirect@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24" + integrity sha1-HQPd7VO9jbDzDCbk+V02/HyH3CQ= + +is-retry-allowed@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34" + integrity sha1-EaBgVotnM5REAz0BJaYaINVk+zQ= + +is-stream@^1.0.0, is-stream@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= + +is-windows@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== + +isarray@1.0.0, isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +isexe@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + +isobject@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= + dependencies: + isarray "1.0.0" + +isobject@^3.0.0, isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= + +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= + dependencies: + is-buffer "^1.1.5" + +kind-of@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" + integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= + dependencies: + is-buffer "^1.1.5" + +kind-of@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" + integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== + +kind-of@^6.0.0, kind-of@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" + integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== + +latest-version@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-3.1.0.tgz#a205383fea322b33b5ae3b18abee0dc2f356ee15" + integrity sha1-ogU4P+oyKzO1rjsYq+4NwvNW7hU= + dependencies: + package-json "^4.0.0" + +lines-and-columns@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" + integrity sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA= + +lowercase-keys@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" + integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== + +lru-cache@^4.0.1: + version "4.1.5" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" + integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== + dependencies: + pseudomap "^1.0.2" + yallist "^2.1.2" + +make-dir@^1.0.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c" + integrity sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ== + dependencies: + pify "^3.0.0" + +map-cache@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= + +map-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" + integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= + dependencies: + object-visit "^1.0.0" + +media-typer@0.3.0: + version "0.3.0" + resolved 
"https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= + +methods@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= + +micromatch@^3.1.10, micromatch@^3.1.4: + version "3.1.10" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" + integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + braces "^2.3.1" + define-property "^2.0.2" + extend-shallow "^3.0.2" + extglob "^2.0.4" + fragment-cache "^0.2.1" + kind-of "^6.0.2" + nanomatch "^1.2.9" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.2" + +mime-db@1.40.0: + version "1.40.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.40.0.tgz#a65057e998db090f732a68f6c276d387d4126c32" + integrity sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA== + +mime-types@~2.1.24: + version "2.1.24" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.24.tgz#b6f8d0b3e951efb77dedeca194cff6d16f676f81" + integrity sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ== + dependencies: + mime-db "1.40.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimist@0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" + integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= + +minimist@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" + integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= + +minipass@^2.2.1, minipass@^2.3.5: + version "2.3.5" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.3.5.tgz#cacebe492022497f656b0f0f51e2682a9ed2d848" + integrity sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA== + dependencies: + safe-buffer "^5.1.2" + yallist "^3.0.0" + +minizlib@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.2.1.tgz#dd27ea6136243c7c880684e8672bb3a45fd9b614" + integrity sha512-7+4oTUOWKg7AuL3vloEWekXY2/D20cevzsrNT2kGWm+39J9hGTCBv8VI5Pm5lXZ/o3/mdR4f8rflAPhnQb8mPA== + dependencies: + minipass "^2.2.1" + +mixin-deep@^1.2.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" + integrity 
sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== + dependencies: + for-in "^1.0.2" + is-extendable "^1.0.1" + +mkdirp@^0.5.0, mkdirp@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" + integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= + dependencies: + minimist "0.0.8" + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + +ms@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" + integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== + +ms@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +mz@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== + dependencies: + any-promise "^1.0.0" + object-assign "^4.0.1" + thenify-all "^1.0.0" + +nan@^2.12.1: + version "2.14.0" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" + integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== + +nanomatch@^1.2.9: + version "1.2.13" + resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" + integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + define-property "^2.0.2" + extend-shallow "^3.0.2" + fragment-cache "^0.2.1" + is-windows "^1.0.2" + kind-of "^6.0.2" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +needle@^2.2.1: + version "2.4.0" + resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" + integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== + dependencies: + debug "^3.2.6" + iconv-lite "^0.4.4" + sax "^1.2.4" + +negotiator@0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" + integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== + +node-modules-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40" + integrity sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA= + +node-pre-gyp@^0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" + integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== + dependencies: + detect-libc "^1.0.2" + mkdirp "^0.5.1" + needle "^2.2.1" + nopt "^4.0.1" + npm-packlist "^1.1.6" + npmlog "^4.0.2" + rc "^1.2.7" + rimraf "^2.6.1" + semver "^5.3.0" + tar "^4" + +nodemon@^1.19.1: + version "1.19.1" + resolved 
"https://registry.yarnpkg.com/nodemon/-/nodemon-1.19.1.tgz#576f0aad0f863aabf8c48517f6192ff987cd5071" + integrity sha512-/DXLzd/GhiaDXXbGId5BzxP1GlsqtMGM9zTmkWrgXtSqjKmGSbLicM/oAy4FR0YWm14jCHRwnR31AHS2dYFHrg== + dependencies: + chokidar "^2.1.5" + debug "^3.1.0" + ignore-by-default "^1.0.1" + minimatch "^3.0.4" + pstree.remy "^1.1.6" + semver "^5.5.0" + supports-color "^5.2.0" + touch "^3.1.0" + undefsafe "^2.0.2" + update-notifier "^2.5.0" + +nopt@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" + integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= + dependencies: + abbrev "1" + osenv "^0.1.4" + +nopt@~1.0.10: + version "1.0.10" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee" + integrity sha1-bd0hvSoxQXuScn3Vhfim83YI6+4= + dependencies: + abbrev "1" + +normalize-path@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" + integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= + dependencies: + remove-trailing-separator "^1.0.1" + +normalize-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +npm-bundled@^1.0.1: + version "1.0.6" + resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" + integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== + +npm-packlist@^1.1.6: + version "1.4.4" + resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.4.tgz#866224233850ac534b63d1a6e76050092b5d2f44" + integrity sha512-zTLo8UcVYtDU3gdeaFu2Xu0n0EvelfHDGuqtNIn5RO7yQj4H1TqNdBc/yZjxnWA0PVB8D3Woyp0i5B43JwQ6Vw== + dependencies: + ignore-walk "^3.0.1" + npm-bundled "^1.0.1" + +npm-run-path@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= + dependencies: + path-key "^2.0.0" + +npmlog@^4.0.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" + integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== + dependencies: + are-we-there-yet "~1.1.2" + console-control-strings "~1.1.0" + gauge "~2.7.3" + set-blocking "~2.0.0" + +number-is-nan@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= + +object-assign@^4.0.1, object-assign@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= + +object-copy@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" + integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= + dependencies: + copy-descriptor "^0.1.0" + define-property "^0.2.5" + kind-of "^3.0.3" + +object-visit@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" + integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= + dependencies: + isobject "^3.0.0" + +object.pick@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" + integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= + dependencies: + isobject "^3.0.1" + +on-finished@~2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" + integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= + dependencies: + ee-first "1.1.1" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +os-homedir@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= + +os-tmpdir@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= + +osenv@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" + integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== + dependencies: + os-homedir "^1.0.0" + os-tmpdir "^1.0.0" + +p-finally@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= + +package-json@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/package-json/-/package-json-4.0.1.tgz#8869a0401253661c4c4ca3da6c2121ed555f5eed" + integrity sha1-iGmgQBJTZhxMTKPabCEh7VVfXu0= + dependencies: + got "^6.7.1" + registry-auth-token "^3.0.1" + registry-url "^3.0.3" + semver "^5.1.0" + +parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascalcase@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" + integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= + +path-dirname@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" + integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-is-inside@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" + integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= + +path-key@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= + +path-to-regexp@0.1.7: + version "0.1.7" + resolved 
"https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= + +pify@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" + integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= + +pirates@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.1.tgz#643a92caf894566f91b2b986d2c66950a8e2fb87" + integrity sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA== + dependencies: + node-modules-regexp "^1.0.0" + +posix-character-classes@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" + integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= + +prepend-http@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" + integrity sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw= + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +proxy-addr@~2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.5.tgz#34cbd64a2d81f4b1fd21e76f9f06c8a45299ee34" + integrity sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ== + dependencies: + forwarded "~0.1.2" + ipaddr.js "1.9.0" + +pseudomap@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" + integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= + +pstree.remy@^1.1.6: + version "1.1.7" + resolved "https://registry.yarnpkg.com/pstree.remy/-/pstree.remy-1.1.7.tgz#c76963a28047ed61542dc361aa26ee55a7fa15f3" + integrity sha512-xsMgrUwRpuGskEzBFkH8NmTimbZ5PcPup0LA8JJkHIm2IMUbQcpo3yeLNWVrufEYjh8YwtSVh0xz6UeWc5Oh5A== + +qs@6.7.0: + version "6.7.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" + integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== + +range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" + integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== + dependencies: + bytes "3.1.0" + http-errors "1.7.2" + iconv-lite "0.4.24" + unpipe "1.0.0" + +rc@^1.0.1, rc@^1.1.6, rc@^1.2.7: + version "1.2.8" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" + integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== + dependencies: + deep-extend "^0.6.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +readable-stream@^2.0.2, readable-stream@^2.0.6: + version "2.3.6" + resolved 
"https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" + integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readdirp@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" + integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== + dependencies: + graceful-fs "^4.1.11" + micromatch "^3.1.10" + readable-stream "^2.0.2" + +regex-not@^1.0.0, regex-not@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" + integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== + dependencies: + extend-shallow "^3.0.2" + safe-regex "^1.1.0" + +registry-auth-token@^3.0.1: + version "3.4.0" + resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-3.4.0.tgz#d7446815433f5d5ed6431cd5dca21048f66b397e" + integrity sha512-4LM6Fw8eBQdwMYcES4yTnn2TqIasbXuwDx3um+QRs7S55aMKCBKBxvPXl2RiUjHwuJLTyYfxSpmfSAjQpcuP+A== + dependencies: + rc "^1.1.6" + safe-buffer "^5.0.1" + +registry-url@^3.0.3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-3.1.0.tgz#3d4ef870f73dde1d77f0cf9a381432444e174942" + integrity sha1-PU74cPc93h138M+aOBQyRE4XSUI= + dependencies: + rc "^1.0.1" + +remove-trailing-separator@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" + integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= + +repeat-element@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" + integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== + +repeat-string@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= + +resolve-url@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" + integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= + +ret@~0.1.10: + version "0.1.15" + resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" + integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== + +rimraf@^2.6.1: + version "2.6.3" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" + integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== + dependencies: + glob "^7.1.3" + +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@^5.0.1, safe-buffer@^5.1.2: + version "5.2.0" + resolved 
"https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519" + integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg== + +safe-regex@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" + integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= + dependencies: + ret "~0.1.10" + +"safer-buffer@>= 2.1.2 < 3": + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sax@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +semver-diff@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36" + integrity sha1-S7uEN8jTfksM8aaP1ybsbWRdbTY= + dependencies: + semver "^5.0.3" + +semver@^5.0.3, semver@^5.1.0, semver@^5.3.0, semver@^5.5.0: + version "5.7.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.0.tgz#790a7cf6fea5459bac96110b29b60412dc8ff96b" + integrity sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA== + +send@0.17.1: + version "0.17.1" + resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" + integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== + dependencies: + debug "2.6.9" + depd "~1.1.2" + destroy "~1.0.4" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "~1.7.2" + mime "1.6.0" + ms "2.1.1" + on-finished "~2.3.0" + range-parser "~1.2.1" + statuses "~1.5.0" + +serve-static@1.14.1: + version "1.14.1" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" + integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.17.1" + +set-blocking@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= + +set-value@^2.0.0, set-value@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" + integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + is-plain-object "^2.0.3" + split-string "^3.0.1" + +setprototypeof@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" + integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== + +shebang-command@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= + dependencies: + shebang-regex "^1.0.0" + 
+shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= + +signal-exit@^3.0.0, signal-exit@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" + integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= + +snapdragon-node@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" + integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== + dependencies: + define-property "^1.0.0" + isobject "^3.0.0" + snapdragon-util "^3.0.1" + +snapdragon-util@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" + integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== + dependencies: + kind-of "^3.2.0" + +snapdragon@^0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" + integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== + dependencies: + base "^0.11.1" + debug "^2.2.0" + define-property "^0.2.5" + extend-shallow "^2.0.1" + map-cache "^0.2.2" + source-map "^0.5.6" + source-map-resolve "^0.5.0" + use "^3.1.0" + +source-map-resolve@^0.5.0: + version "0.5.2" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" + integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== + dependencies: + atob "^2.1.1" + decode-uri-component "^0.2.0" + resolve-url "^0.2.1" + source-map-url "^0.4.0" + urix "^0.1.0" + +source-map-url@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" + integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= + +source-map@^0.5.6: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + +split-string@^3.0.1, split-string@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" + integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== + dependencies: + extend-shallow "^3.0.0" + +static-extend@^0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" + integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= + dependencies: + define-property "^0.2.5" + object-copy "^0.1.0" + +"statuses@>= 1.5.0 < 2", statuses@~1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" + integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= + +string-width@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + strip-ansi "^3.0.0" 
+ +"string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^3.0.0, strip-ansi@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= + dependencies: + ansi-regex "^3.0.0" + +strip-eof@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= + +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= + +sucrase@^3.10.1: + version "3.10.1" + resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.10.1.tgz#70ce0bad0e4c8fbc3c3184dbd1797e82990d0602" + integrity sha512-nMOs6rFWwkYRxcKHHDjyQmC5CmLbHN2LwRyWF1n2i0kb/pq0xcB9M19TdY5Ivfcj1BsWfs+az9Ga5B0tFdE5ww== + dependencies: + commander "^2.19.0" + lines-and-columns "^1.1.6" + mz "^2.7.0" + pirates "^4.0.0" + +supports-color@^5.2.0, supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +tar@^4: + version "4.4.10" + resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.10.tgz#946b2810b9a5e0b26140cf78bea6b0b0d689eba1" + integrity sha512-g2SVs5QIxvo6OLp0GudTqEf05maawKUxXru104iaayWA09551tFCTI8f1Asb4lPfkBr91k07iL4c11XO3/b0tA== + dependencies: + chownr "^1.1.1" + fs-minipass "^1.2.5" + minipass "^2.3.5" + minizlib "^1.2.1" + mkdirp "^0.5.0" + safe-buffer "^5.1.2" + yallist "^3.0.3" + +term-size@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/term-size/-/term-size-1.2.0.tgz#458b83887f288fc56d6fffbfad262e26638efa69" + integrity sha1-RYuDiH8oj8Vtb/+/rSYuJmOO+mk= + dependencies: + execa "^0.7.0" + +thenify-all@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + integrity sha1-GhkY1ALY/D+Y+/I02wvMjMEOlyY= + dependencies: + thenify ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + version "3.3.0" + resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.0.tgz#e69e38a1babe969b0108207978b9f62b88604839" + integrity sha1-5p44obq+lpsBCCB5eLn2K4hgSDk= + dependencies: + any-promise "^1.0.0" + +timed-out@^4.0.0: + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f" + integrity sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8= + +to-object-path@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" + integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= + dependencies: + kind-of "^3.0.2" + +to-regex-range@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" + integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= + dependencies: + is-number "^3.0.0" + repeat-string "^1.6.1" + +to-regex@^3.0.1, to-regex@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" + integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== + dependencies: + define-property "^2.0.2" + extend-shallow "^3.0.2" + regex-not "^1.0.2" + safe-regex "^1.1.0" + +toidentifier@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" + integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== + +touch@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b" + integrity sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA== + dependencies: + nopt "~1.0.10" + +type-is@~1.6.17, type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +undefsafe@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/undefsafe/-/undefsafe-2.0.2.tgz#225f6b9e0337663e0d8e7cfd686fc2836ccace76" + integrity sha1-Il9rngM3Zj4Njnz9aG/Cg2zKznY= + dependencies: + debug "^2.2.0" + +union-value@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" + integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== + dependencies: + arr-union "^3.1.0" + get-value "^2.0.6" + is-extendable "^0.1.1" + set-value "^2.0.1" + +unique-string@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-1.0.0.tgz#9e1057cca851abb93398f8b33ae187b99caec11a" + integrity sha1-nhBXzKhRq7kzmPizOuGHuZyuwRo= + dependencies: + crypto-random-string "^1.0.0" + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= + +unset-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" + integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= + dependencies: + has-value "^0.3.1" + isobject "^3.0.0" + +unzip-response@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/unzip-response/-/unzip-response-2.0.1.tgz#d2f0f737d16b0615e72a6935ed04214572d56f97" + integrity sha1-0vD3N9FrBhXnKmk17QQhRXLVb5c= + +upath@^1.1.1: + version "1.1.2" + resolved 
"https://registry.yarnpkg.com/upath/-/upath-1.1.2.tgz#3db658600edaeeccbe6db5e684d67ee8c2acd068" + integrity sha512-kXpym8nmDmlCBr7nKdIx8P2jNBa+pBpIUFRnKJ4dr8htyYGJFokkr2ZvERRtUN+9SY+JqXouNgUPtv6JQva/2Q== + +update-notifier@^2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-2.5.0.tgz#d0744593e13f161e406acb1d9408b72cad08aff6" + integrity sha512-gwMdhgJHGuj/+wHJJs9e6PcCszpxR1b236igrOkUofGhqJuG+amlIKwApH1IW1WWl7ovZxsX49lMBWLxSdm5Dw== + dependencies: + boxen "^1.2.1" + chalk "^2.0.1" + configstore "^3.0.0" + import-lazy "^2.1.0" + is-ci "^1.0.10" + is-installed-globally "^0.1.0" + is-npm "^1.0.0" + latest-version "^3.0.0" + semver-diff "^2.0.0" + xdg-basedir "^3.0.0" + +urix@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" + integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= + +url-parse-lax@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73" + integrity sha1-evjzA2Rem9eaJy56FKxovAYJ2nM= + dependencies: + prepend-http "^1.0.1" + +use@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" + integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== + +util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= + +vary@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= + +which@^1.2.9: + version "1.3.1" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +wide-align@^1.1.0: + version "1.1.3" + resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" + integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== + dependencies: + string-width "^1.0.2 || 2" + +widest-line@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-2.0.1.tgz#7438764730ec7ef4381ce4df82fb98a53142a3fc" + integrity sha512-Ba5m9/Fa4Xt9eb2ELXt77JxVDV8w7qQrH0zS/TWSJdLyAwQjWoOzpzj5lwVftDz6n/EOu3tNACS84v509qwnJA== + dependencies: + string-width "^2.1.1" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +write-file-atomic@^2.0.0: + version "2.4.3" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.4.3.tgz#1fd2e9ae1df3e75b8d8c367443c692d4ca81f481" + integrity sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ== + dependencies: + graceful-fs "^4.1.11" + imurmurhash "^0.1.4" + signal-exit "^3.0.2" + +xdg-basedir@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4" + integrity sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ= + +yallist@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" + integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= + +yallist@^3.0.0, yallist@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.0.3.tgz#b4b049e314be545e3ce802236d6cd22cd91c3de9" + integrity sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==